diff --git a/codegenerator/cli/templates/static/codegen/src/GqlDbCustomTypes.res b/codegenerator/cli/templates/static/codegen/src/GqlDbCustomTypes.res
index f23f36023..62e24ead9 100644
--- a/codegenerator/cli/templates/static/codegen/src/GqlDbCustomTypes.res
+++ b/codegenerator/cli/templates/static/codegen/src/GqlDbCustomTypes.res
@@ -1,20 +1,17 @@
-// In postgres floats are stored as numeric, which get returned to us as strtings. So the decoder / encoder need to do that for us.
 module Float = {
   @genType
   type t = float

-  let schema =
-    S.string
-    ->S.setName("GqlDbCustomTypes.Float")
-    ->S.transform(s => {
-      parser: string => {
-        switch string->Belt.Float.fromString {
-        | Some(db) => db
-        | None => s.fail("The string is not valid GqlDbCustomTypes.Float")
-        }
-      },
-      serializer: float => float->Js.Float.toString,
-    })
+  external fromStringUnsafe: string => float = "Number"
+
+  let schema = S.union([
+    S.float,
+    //This is needed to parse entity history json fields
+    S.string->S.transform(_s => {
+      parser: string => string->fromStringUnsafe,
+      serializer: Utils.magic,
+    }),
+  ])->S.setName("GqlDbCustomTypes.Float")
 }

 // Schema allows parsing strings or numbers to ints
@@ -28,6 +25,7 @@ module Int = {

   let schema = S.union([
     S.int,
+    //This is needed to parse entity history json fields
     S.string->S.transform(_s => {
       parser: string => string->fromStringUnsafe,
       serializer: Utils.magic,
diff --git a/scenarios/test_codegen/test/SerDe_Test.res b/scenarios/test_codegen/test/SerDe_Test.res
index 1bc34147f..e04d60ce6 100644
--- a/scenarios/test_codegen/test/SerDe_Test.res
+++ b/scenarios/test_codegen/test/SerDe_Test.res
@@ -41,10 +41,25 @@ describe("SerDe Test", () => {
       // arrayOfTimestamps: [],
     }

+    //Fails if serialization does not work
     let set = DbFunctionsEntities.batchSet(~entityMod=module(Entities.EntityWithAllTypes))
-
+    //Fails if parsing does not work
+    let read = DbFunctionsEntities.batchRead(~entityMod=module(Entities.EntityWithAllTypes))
     //set the entity
-    await DbFunctions.sql->set([entity])
+    switch await DbFunctions.sql->set([entity]) {
+    | exception exn =>
+      Js.log(exn)
+      Assert.fail("Failed to set entity in table")
+    | _ => ()
+    }
+
+    switch await DbFunctions.sql->read([entity.id]) {
+    | exception exn =>
+      Js.log(exn)
+      Assert.fail("Failed to read entity from table")
+    | [_entity] => ()
+    | _ => Assert.fail("Should have returned a row on batch read fn")
+    }

     //The copy function will do it's custom postgres serialization of the entity
     await DbFunctions.sql->DbFunctions.EntityHistory.copyAllEntitiesToEntityHistory
@@ -54,12 +69,16 @@
     switch res {
     | [row] =>
       let json = row["params"]
-      let parsed = json->S.parseOrRaiseWith(Entities.EntityWithAllTypes.schema)
-      Assert.deepEqual(
-        parsed,
-        entity,
-        ~message="Postgres json serialization should be compatable with our schema",
-      )
+      let parsed = json->S.parseWith(Entities.EntityWithAllTypes.schema)
+      switch parsed {
+      | Ok(parsed) =>
+        Assert.deepEqual(
+          parsed,
+          entity,
+          ~message="Postgres json serialization should be compatible with our schema",
+        )
+      | Error(e) => Assert.fail("Failed to parse entity history: " ++ e->S.Error.reason)
+      }
     | _ => Assert.fail("Should have returned a row")
     }
   })
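
Reviewer note, not part of the patch: a minimal ReScript sketch of what the new union schema is expected to accept, assuming the same `S.parseWith(json, schema)` call shape used in SerDe_Test.res above; the `fromNumber`/`fromString` bindings are hypothetical names for illustration only.

// A plain JSON number is handled by the S.float branch of the union.
let fromNumber = Js.Json.number(1.5)->S.parseWith(GqlDbCustomTypes.Float.schema) // expected Ok(1.5)

// A numeric string, as stored in entity history json fields, falls through to the
// S.string->S.transform branch and is coerced via the unsafe Number binding.
let fromString = Js.Json.string("1.5")->S.parseWith(GqlDbCustomTypes.Float.schema) // expected Ok(1.5)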