diff --git a/.sbtopts b/.sbtopts new file mode 100644 index 0000000..71da33e --- /dev/null +++ b/.sbtopts @@ -0,0 +1,2 @@ +-J-XX:+UseG1GC +-J-Xmx4G diff --git a/.scalafmt.conf b/.scalafmt.conf index f3ca6a5..9b7a7aa 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,11 +1,12 @@ version = "3.5.8" align.preset = none runner.dialect = scala213 + fileOverride { - "glob:**/src/*/scala-3" { + "glob:**/src/**/scala-3/**" { runner.dialect = scala3 } - "glob:**/src/*/scala-2.12" { + "glob:**/src/**/scala-2.12" { runner.dialect = scala212 } } \ No newline at end of file diff --git a/build.sbt b/build.sbt index 9d82d8c..7145d37 100644 --- a/build.sbt +++ b/build.sbt @@ -16,11 +16,13 @@ ThisBuild / scmInfo := Some( ThisBuild / startYear := Some(2020) Global / excludeLintKeys += scmInfo -val Scala213 = "2.13.10" -val scala3 = "3.3.1" +val Scala212 = "2.12.19" +val Scala213 = "2.13.14" +val scala33 = "3.3.4" +val scala36 = "3.6.2" ThisBuild / spiewakMainBranches := Seq("main") -ThisBuild / crossScalaVersions := Seq(Scala213, scala3, "2.12.14") +ThisBuild / crossScalaVersions := Seq(Scala213, scala36, scala33) ThisBuild / versionIntroduced := Map("3.0.0" -> "0.3.0") ThisBuild / scalaVersion := (ThisBuild / crossScalaVersions).value.head ThisBuild / initialCommands := """ @@ -32,6 +34,13 @@ lazy val root = project .in(file(".")) .enablePlugins(NoPublishPlugin, SonatypeCiReleasePlugin) .aggregate(core.js, core.jvm, benchmark) + .settings( + libraryDependencies --= List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full + ) + ) + ) lazy val core = crossProject(JSPlatform, JVMPlatform) .crossType(CrossType.Full) @@ -49,6 +58,23 @@ lazy val core = crossProject(JSPlatform, JVMPlatform) "org.scalameta" %%% "munit" % "0.7.29" % Test, "org.scalameta" %%% "munit-scalacheck" % "0.7.29" % Test ), + // TODO sbt-spiewak-sonatype adds kind-projector 0.13.2 but it does not exist anymore + libraryDependencies --= List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full + ) + ), + libraryDependencies ++= { + if (!scalaBinaryVersion.value.startsWith("3")) { + List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.3" cross CrossVersion.full + ) + ) + } else { + Nil + } + }, mimaBinaryIssueFilters ++= List( ProblemFilters.exclude[ReversedMissingMethodProblem]( "dynosaur.Schema.dynosaur$Schema$$read_" @@ -78,6 +104,13 @@ lazy val benchmark = project .dependsOn(core.jvm) .enablePlugins(JmhPlugin) .disablePlugins(MimaPlugin) + .settings( + libraryDependencies --= List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full + ) + ) + ) lazy val jsdocs = project .dependsOn(core.js) @@ -86,6 +119,13 @@ lazy val jsdocs = project libraryDependencies += "org.scala-js" %%% "scalajs-dom" % "2.3.0" ) .enablePlugins(ScalaJSPlugin) + .settings( + libraryDependencies --= List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full + ) + ) + ) lazy val docs = project .in(file("mdoc")) @@ -104,6 +144,13 @@ lazy val docs = project ) .dependsOn(core.jvm) .enablePlugins(MdocPlugin, NoPublishPlugin) + .settings( + libraryDependencies --= List( + compilerPlugin( + "org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full + ) + ) + ) ThisBuild / githubWorkflowJavaVersions := Seq(JavaSpec.temurin("11")) diff --git a/modules/benchmark/src/main/scala/DecodingBench.scala b/modules/benchmark/src/main/scala/DecodingBench.scala index 839644f..c2180c5 100644 --- 
a/modules/benchmark/src/main/scala/DecodingBench.scala +++ b/modules/benchmark/src/main/scala/DecodingBench.scala @@ -18,190 +18,18 @@ package dynosaur import cats.syntax.all._ -import org.openjdk.jmh.annotations.{ - Benchmark, - BenchmarkMode, - Mode -} +import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Mode} import cats.data.NonEmptyList -sealed trait Dynosaur -object Dynosaur { - case class Parasaurolophus(name: String, age: Int, songs: Int) - extends Dynosaur - case class TyrannosaurusRex(name: String, age: Int, victims: Int) - extends Dynosaur - case class Allosaurus(name: String, age: Int, attacks: Int) extends Dynosaur -} - -import Dynosaur._ - -object DecodingBench { - - val tyrannosaurusRexDv = DynamoValue.m( - "name" -> DynamoValue.s("Foolio"), - "age" -> DynamoValue.n(20000000), - "victims" -> DynamoValue.n(9) - ) - - val parasaurolophusDv = DynamoValue.m( - "name" -> DynamoValue.s("Cantolio"), - "age" -> DynamoValue.n(25000000), - "songs" -> DynamoValue.n(9) - ) - - val allosaurusDv = DynamoValue.m( - "name" -> DynamoValue.s("Cantolio"), - "age" -> DynamoValue.n(25000000), - "attacks" -> DynamoValue.n(99) - ) - - val tyrannosaurusRexWithTagDv = DynamoValue.m( - "tyrannosaurus-rex" -> tyrannosaurusRexDv - ) - - val allosaurusWithTagDv = DynamoValue.m( - "allosaurus" -> allosaurusDv - ) - - val parasaurolophusWithTagDv = DynamoValue.m( - "parasaurolophus" -> parasaurolophusDv - ) - - val allosauruses = DynamoValue.l( - (0 until 10) - .map(_ => allosaurusDv) - .toList - ) - - val dynosaursWithTag = DynamoValue.l( - tyrannosaurusRexWithTagDv, - allosaurusWithTagDv, - parasaurolophusWithTagDv - ) - - val dynosaurDvWithDiscriminator = DynamoValue.m( - "name" -> DynamoValue.s("Foolio"), - "age" -> DynamoValue.n(20000000), - "victims" -> DynamoValue.n(9), - "kind" -> DynamoValue.s("tyrannosaurus-rex") - ) - - val schermaForParasaurolophus: Schema[Parasaurolophus] = - Schema.record[Parasaurolophus] { fields => - ( - fields("name", _.name), - fields("age", _.age), - fields("songs", _.songs) - ).mapN(Parasaurolophus.apply) - } - - def defSchermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = - Schema.record[TyrannosaurusRex] { fields => - ( - fields("name", _.name), - fields("age", _.age), - fields("victims", _.victims) - ).mapN(TyrannosaurusRex.apply) - } - - val schermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = - defSchermaForTyrannosaurusRex - - implicit def implicitSchermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = - defSchermaForTyrannosaurusRex - - val schermaForAllosaurus: Schema[Allosaurus] = Schema.record[Allosaurus] { - fields => - ( - fields("name", _.name), - fields("age", _.age), - fields("attacks", _.attacks) - ).mapN(Allosaurus.apply) - } - - def defSchermaForAllosaurus: Schema[Allosaurus] = Schema.record[Allosaurus] { - fields => - ( - fields("name", _.name), - fields("age", _.age), - fields("attacks", _.attacks) - ).mapN(Allosaurus.apply) - } - - implicit lazy val implicitSchermaForAllosaurus: Schema[Allosaurus] = - defSchermaForAllosaurus - - val schermaForParasaurolophusWithDiscriminator: Schema[Parasaurolophus] = - Schema.record[Parasaurolophus] { fields => - fields.const("kind", "parasaurolophus") *> - ( - fields("name", _.name), - fields("age", _.age), - fields("songs", _.songs) - ).mapN(Parasaurolophus.apply) - } - - val schermaForTyrannosaurusRexWithDiscriminator: Schema[TyrannosaurusRex] = - Schema.record[TyrannosaurusRex] { fields => - fields.const("kind", "tyrannosaurus-rex") *> - ( - fields("name", _.name), - fields("age", _.age), - 
fields("victims", _.victims) - ).mapN(TyrannosaurusRex.apply) - } - - val schermaForAllosaurusWithDiscriminator: Schema[Allosaurus] = - Schema.record[Allosaurus] { fields => - fields.const("kind", "allosaurus") *> - ( - fields("name", _.name), - fields("age", _.age), - fields("attacks", _.attacks) - ).mapN(Allosaurus.apply) - } - - val schemaForDynosaurWithTag: Schema[Dynosaur] = Schema.oneOf { alt => - NonEmptyList - .of( - alt(schermaForParasaurolophus.tag("parasaurolophus")), - alt(schermaForTyrannosaurusRex.tag("tyrannosaurus-rex")), - alt(schermaForAllosaurus.tag("allosaurus")) - ) - .reduceLeft(_ |+| _) - } - - val schemaForDynosaurWithDiscriminator: Schema[Dynosaur] = Schema.oneOf { - alt => - NonEmptyList - .of( - alt(schermaForParasaurolophusWithDiscriminator), - alt(schermaForTyrannosaurusRexWithDiscriminator), - alt(schermaForAllosaurusWithDiscriminator) - ) - .reduceLeft(_ |+| _) - } - - val schermaForAllosauruses = - Schema.seq(schermaForAllosaurus) - - val string = DynamoValue.s("dynosaur") - val strings = DynamoValue.l((0 until 10).map { idx => - DynamoValue.s(s"test-$idx") - }.toList) - - val schemaForStrings = Schema.seq(Schema.string) -} +import schemas._ +import data._ class DecodingBench { - import DecodingBench._ - @Benchmark @BenchmarkMode(Array(Mode.Throughput)) def decodeAnS = - Schema.string.read(string) + Schema.string.read(stringDv) @Benchmark @BenchmarkMode(Array(Mode.Throughput)) @@ -216,10 +44,10 @@ class DecodingBench { @Benchmark @BenchmarkMode(Array(Mode.Throughput)) def decodeOneOfWithDiscriminator = - schemaForDynosaurWithDiscriminator.read(dynosaurDvWithDiscriminator) + schemaForDynosaurWithDiscriminator.read(dynosaurWithDiscriminatorDv) @Benchmark @BenchmarkMode(Array(Mode.Throughput)) - def decodeList = schemaForStrings.read(strings) + def decodeList = schemaForStrings.read(stringsDv) } diff --git a/modules/benchmark/src/main/scala/EncodingBench.scala b/modules/benchmark/src/main/scala/EncodingBench.scala new file mode 100644 index 0000000..5bba489 --- /dev/null +++ b/modules/benchmark/src/main/scala/EncodingBench.scala @@ -0,0 +1,86 @@ +/* + * Copyright 2020 Fabio Labella + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dynosaur + +import cats.syntax.all._ + +import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Mode} +import cats.data.NonEmptyList +import scala.jdk.CollectionConverters._ +import schemas._ +import data._ + +class EncodingBench { + + @Benchmark + @BenchmarkMode(Array(Mode.Throughput)) + def encodeAnS = + Schema.string.write(string) + + @Benchmark + @BenchmarkMode(Array(Mode.Throughput)) + def encodeRawAnM = + DynamoValue.m( + Map( + "name" -> DynamoValue.s(allosaurus.name), + "age" -> DynamoValue.n(allosaurus.age), + "attacks" -> DynamoValue.n(allosaurus.attacks) + ) + ) + + @Benchmark + @BenchmarkMode(Array(Mode.Throughput)) + def encodeRawAnMAsAv = { + val map = new java.util.IdentityHashMap[ + String, + software.amazon.awssdk.services.dynamodb.model.AttributeValue + ](3) + map.put( + "name", + software.amazon.awssdk.services.dynamodb.model.AttributeValue + .builder() + .s(allosaurus.name) + .build() + ) + map.put( + "age", + software.amazon.awssdk.services.dynamodb.model.AttributeValue + .builder() + .n(allosaurus.age.toString) + .build() + ) + map.put( + "attacks", + software.amazon.awssdk.services.dynamodb.model.AttributeValue + .builder() + .n(allosaurus.attacks.toString) + .build() + ) + DynamoValue( + software.amazon.awssdk.services.dynamodb.model.AttributeValue + .builder() + .m(map) + .build() + ) + } + + @Benchmark + @BenchmarkMode(Array(Mode.Throughput)) + def encodeAnM = + schermaForAllosaurus.write(allosaurus) + +} diff --git a/modules/benchmark/src/main/scala/data.scala b/modules/benchmark/src/main/scala/data.scala new file mode 100644 index 0000000..262d546 --- /dev/null +++ b/modules/benchmark/src/main/scala/data.scala @@ -0,0 +1,64 @@ +package dynosaur + +object data { + val tyrannosaurusRexDv = DynamoValue.m( + "name" -> DynamoValue.s("Foolio"), + "age" -> DynamoValue.n(20000000), + "victims" -> DynamoValue.n(9) + ) + + val parasaurolophusDv = DynamoValue.m( + "name" -> DynamoValue.s("Cantolio"), + "age" -> DynamoValue.n(25000000), + "songs" -> DynamoValue.n(9) + ) + + val allosaurus = Allosaurus( + name = "Cantolio", + age = 25000000, + attacks = 99 + ) + val allosaurusDv = DynamoValue.m( + "name" -> DynamoValue.s("Cantolio"), + "age" -> DynamoValue.n(25000000), + "attacks" -> DynamoValue.n(99) + ) + + val tyrannosaurusRexWithTagDv = DynamoValue.m( + "tyrannosaurus-rex" -> tyrannosaurusRexDv + ) + + val allosaurusWithTagDv = DynamoValue.m( + "allosaurus" -> allosaurusDv + ) + + val parasaurolophusWithTagDv = DynamoValue.m( + "parasaurolophus" -> parasaurolophusDv + ) + + val allosaurusesDv = DynamoValue.l( + (0 until 10) + .map(_ => allosaurusDv) + .toList + ) + + val dynosaursWithTagDv = DynamoValue.l( + tyrannosaurusRexWithTagDv, + allosaurusWithTagDv, + parasaurolophusWithTagDv + ) + + val string = "dynosaur" + val stringDv = DynamoValue.s("dynosaur") + val stringsDv = DynamoValue.l((0 until 10).map { idx => + DynamoValue.s(s"test-$idx") + }.toList) + + val dynosaurWithDiscriminatorDv = DynamoValue.m( + "name" -> DynamoValue.s("Foolio"), + "age" -> DynamoValue.n(20000000), + "victims" -> DynamoValue.n(9), + "kind" -> DynamoValue.s("tyrannosaurus-rex") + ) + +} diff --git a/modules/benchmark/src/main/scala/model.scala b/modules/benchmark/src/main/scala/model.scala new file mode 100644 index 0000000..16944a8 --- /dev/null +++ b/modules/benchmark/src/main/scala/model.scala @@ -0,0 +1,23 @@ +/* + * Copyright 2020 Fabio Labella + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dynosaur + +sealed trait Dynosaur +case class Parasaurolophus(name: String, age: Int, songs: Int) extends Dynosaur +case class TyrannosaurusRex(name: String, age: Int, victims: Int) + extends Dynosaur +case class Allosaurus(name: String, age: Int, attacks: Int) extends Dynosaur diff --git a/modules/benchmark/src/main/scala/schema.scala b/modules/benchmark/src/main/scala/schema.scala new file mode 100644 index 0000000..994b093 --- /dev/null +++ b/modules/benchmark/src/main/scala/schema.scala @@ -0,0 +1,126 @@ +/* + * Copyright 2020 Fabio Labella + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dynosaur + +import cats.syntax.all._ + +import cats.data.NonEmptyList + +object schemas { + + val schermaForParasaurolophus: Schema[Parasaurolophus] = + Schema.record[Parasaurolophus] { fields => + ( + fields("name", _.name), + fields("age", _.age), + fields("songs", _.songs) + ).mapN(Parasaurolophus.apply) + } + + def defSchermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = + Schema.record[TyrannosaurusRex] { fields => + ( + fields("name", _.name), + fields("age", _.age), + fields("victims", _.victims) + ).mapN(TyrannosaurusRex.apply) + } + + val schermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = + defSchermaForTyrannosaurusRex + + implicit def implicitSchermaForTyrannosaurusRex: Schema[TyrannosaurusRex] = + defSchermaForTyrannosaurusRex + + val schermaForAllosaurus: Schema[Allosaurus] = Schema.record[Allosaurus] { + fields => + ( + fields("name", _.name), + fields("age", _.age), + fields("attacks", _.attacks) + ).mapN(Allosaurus.apply) + } + + def defSchermaForAllosaurus: Schema[Allosaurus] = Schema.record[Allosaurus] { + fields => + ( + fields("name", _.name), + fields("age", _.age), + fields("attacks", _.attacks) + ).mapN(Allosaurus.apply) + } + + implicit lazy val implicitSchermaForAllosaurus: Schema[Allosaurus] = + defSchermaForAllosaurus + + val schermaForParasaurolophusWithDiscriminator: Schema[Parasaurolophus] = + Schema.record[Parasaurolophus] { fields => + fields.const("kind", "parasaurolophus") *> + ( + fields("name", _.name), + fields("age", _.age), + fields("songs", _.songs) + ).mapN(Parasaurolophus.apply) + } + + val schermaForTyrannosaurusRexWithDiscriminator: Schema[TyrannosaurusRex] = + Schema.record[TyrannosaurusRex] { fields => + fields.const("kind", "tyrannosaurus-rex") *> + ( + fields("name", _.name), + fields("age", _.age), + fields("victims", _.victims) + ).mapN(TyrannosaurusRex.apply) + } + + val schermaForAllosaurusWithDiscriminator: Schema[Allosaurus] = + Schema.record[Allosaurus] { fields => + 
fields.const("kind", "allosaurus") *> + ( + fields("name", _.name), + fields("age", _.age), + fields("attacks", _.attacks) + ).mapN(Allosaurus.apply) + } + + val schemaForDynosaurWithTag: Schema[Dynosaur] = Schema.oneOf { alt => + NonEmptyList + .of( + alt(schermaForParasaurolophus.tag("parasaurolophus")), + alt(schermaForTyrannosaurusRex.tag("tyrannosaurus-rex")), + alt(schermaForAllosaurus.tag("allosaurus")) + ) + .reduceLeft(_ |+| _) + } + + val schemaForDynosaurWithDiscriminator: Schema[Dynosaur] = Schema.oneOf { + alt => + NonEmptyList + .of( + alt(schermaForParasaurolophusWithDiscriminator), + alt(schermaForTyrannosaurusRexWithDiscriminator), + alt(schermaForAllosaurusWithDiscriminator) + ) + .reduceLeft(_ |+| _) + } + + val schermaForAllosauruses = + Schema.seq(schermaForAllosaurus) + + val schemaForStrings = Schema.seq(Schema.string) + +} diff --git a/modules/core/shared/src/main/scala/internal/decoding.scala b/modules/core/js/src/main/scala/internal/decoding.scala similarity index 76% rename from modules/core/shared/src/main/scala/internal/decoding.scala rename to modules/core/js/src/main/scala/internal/decoding.scala index e5ddcf1..bcefad6 100644 --- a/modules/core/shared/src/main/scala/internal/decoding.scala +++ b/modules/core/js/src/main/scala/internal/decoding.scala @@ -43,13 +43,7 @@ object decoding { case Nul => decodeNull case Sequence(elem) => decodeSequence(elem, _) case Dictionary(elem) => decodeDictionary(elem, _) - case Record(rec) => { value => - // val here caches the traversal of the record - val cachedDecoder = decodeRecord(rec) - value.m - .toRight(ReadError(s"value ${value.toString()} is not a Dictionary")) - .flatMap(cachedDecoder) - } + case r: Record[A] => decodeRecord(r) case Sum(cases) => decodeSum(cases) case Isos(iso) => decodeIsos(iso, _) case Defer(schema) => schema().read @@ -139,36 +133,6 @@ object decoding { .traverse(schema.read) ) - def decodeRecord[R]( - recordSchema: FreeApplicative[Field[R, *], R] - ): Map[String, DynamoValue] => Res[R] = { - - type Target[A] = - Kleisli[Either[ReadError, *], Map[String, DynamoValue], A] - - recordSchema.foldMap { - new (Field[R, *] ~> Target) { - def apply[A](field: Field[R, A]) = - field match { - case Field.Required(name, elemSchema, _) => - Kleisli { (v: Map[String, DynamoValue]) => - v.get(name) - .toRight( - ReadError(s"required field $name does not contain a value") - ) - .flatMap(v => elemSchema.read(v)) - } - case Field.Optional(name, elemSchema, _) => - Kleisli { (v: Map[String, DynamoValue]) => - v - .get(name) - .traverse(v => elemSchema.read(v)) - } - } - } - }.run - } - def decodeSum[A](cases: Chain[Alt[A]]): DynamoValue => Res[A] = { type Decode = DynamoValue => Either[List[ReadError], A] @@ -203,4 +167,46 @@ object decoding { .read(v) .flatMap(xmap.r) + def decodeRecord[R](record: Record[R]): DynamoValue => Either[ReadError, R] = + value => + value.m + .toRight(ReadError(s"value ${value.toString()} is not a Dictionary")) + .flatMap { map => + val decodedValues = new Array[Any](record.fields.length) + var i = 0 + var error: ReadError = null + + while (i < record.fields.length && error == null) { + record.fields(i) match { + case Field.Optional(name, schema, get) => + map.get(name) match { + case None => decodedValues(i) = None + case Some(value) => + schema.read(value) match { + case Left(err) => error = err + case Right(v) => decodedValues(i) = Some(v) + } + } + + case Field.Required(name, schema, get) => + map.get(name) match { + case None => + error = ReadError( + s"required field $name does 
not contain a value" + ) + case Some(value) => + schema.read(value) match { + case Left(err) => error = err + case Right(v) => decodedValues(i) = v + } + } + + } + i += 1 + } + + if (error != null) Left(error) + else Right(record.build(decodedValues.toList)) + } + } diff --git a/modules/core/shared/src/main/scala/internal/encoding.scala b/modules/core/js/src/main/scala/internal/encoding.scala similarity index 72% rename from modules/core/shared/src/main/scala/internal/encoding.scala rename to modules/core/js/src/main/scala/internal/encoding.scala index 1d6c05e..4ae58d0 100644 --- a/modules/core/shared/src/main/scala/internal/encoding.scala +++ b/modules/core/js/src/main/scala/internal/encoding.scala @@ -42,7 +42,7 @@ object encoding { case Nul => encodeNull case Sequence(elem) => encodeSequence(elem, _) case Dictionary(elem) => encodeDictionary(elem, _) - case Record(rec) => encodeRecord(rec) + case r: Record[A] => encodeRecord(r) case Sum(cases) => encodeSum(cases) case Isos(iso) => encodeIsos(iso, _) case Defer(schema) => schema().write @@ -76,46 +76,6 @@ object encoding { .traverse(schema.write) .map(DynamoValue.m) - def encodeRecord[R]( - recordSchema: FreeApplicative[Field[R, *], R] - ): R => Res = { - - implicit def overrideKeys[T]: Monoid[Map[String, T]] = - MonoidK[Map[String, *]].algebra - - type Target[A] = R => Either[WriteError, Map[String, DynamoValue]] - - recordSchema - .analyze { - new (Field[R, *] ~> Target) { - - def write[E]( - name: String, - schema: Schema[E], - elem: E - ): Either[WriteError, Map[String, DynamoValue]] = - schema.write(elem).map { av => Map(name -> av) } - - def apply[B](field: Field[R, B]) = - field match { - case Field.Required(name, elemSchema, get) => - (record: R) => { - val elem = get(record) - write(name, elemSchema, elem) - } - case Field.Optional(name, elemSchema, get) => - (record: R) => { - val elem = get(record) - elem - .foldMap(write(name, elemSchema, _)) - } - } - } - } - .andThen(_.map(DynamoValue.m)) - - } - def encodeSum[C](cases: Chain[Alt[C]]): C => Res = { implicit def orElse[T]: Monoid[Option[T]] = MonoidK[Option].algebra @@ -137,4 +97,38 @@ object encoding { def encodeIsos[V](xmap: XMap[V], value: V): Res = xmap.w(value).flatMap(v => xmap.schema.write(v)) + + def encodeRecord[R]( + record: Record[R] + ): R => Either[WriteError, DynamoValue] = { value => + var i = 0 + var error: WriteError = null + + val dict = scalajs.js.Dictionary[AttributeValue]() + + while (i < record.fields.length && error == null) { + record.fields(i) match { + case Field.Required(name, schema, get) => + schema.write(get(value)) match { + case Left(err) => error = err + case Right(v) => dict.addOne(name, v.value) + } + case Field.Optional(name, schema, get) => + val fieldValue = get(value) + if (fieldValue != None) { + schema.write(fieldValue.get) match { + case Left(err) => error = err + case Right(v) => dict.addOne(name, v.value) + } + } + } + + i += 1 + } + + if (error != null) + Left(error) + else + DynamoValue(AttributeValue.M(dict)).asRight[WriteError] + } } diff --git a/modules/core/jvm/src/main/scala/internal/decoding.scala b/modules/core/jvm/src/main/scala/internal/decoding.scala new file mode 100644 index 0000000..bcefad6 --- /dev/null +++ b/modules/core/jvm/src/main/scala/internal/decoding.scala @@ -0,0 +1,212 @@ +/* + * Copyright 2020 Fabio Labella + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dynosaur +package internal + +import cats.~> +import cats.syntax.all._ +import alleycats.std.map._ +import cats.free.FreeApplicative +import cats.data.{Chain, Kleisli} +import scodec.bits.ByteVector + +import Schema.ReadError +import Schema.structure._ +import scala.annotation.tailrec +import scala.collection.mutable.Builder + +object decoding { + def fromSchema[A](s: Schema[A]): DynamoValue => Either[ReadError, A] = + s match { + case Identity => _.asRight + case Num => decodeNum + case Str => decodeString + case Bool => decodeBool + case Bytes => decodeBytes + case BytesSet => decodeBytesSet + case NumSet => decodeNumSet + case StrSet => decodeStrSet + case Nul => decodeNull + case Sequence(elem) => decodeSequence(elem, _) + case Dictionary(elem) => decodeDictionary(elem, _) + case r: Record[A] => decodeRecord(r) + case Sum(cases) => decodeSum(cases) + case Isos(iso) => decodeIsos(iso, _) + case Defer(schema) => schema().read + } + + type Res[A] = Either[ReadError, A] + + def decodeBool: DynamoValue => Res[Boolean] = { value => + value.bool.toRight(ReadError(s"value ${value.toString()} is not a Boolean")) + } + + def decodeNum: DynamoValue => Res[DynamoValue.Number] = { value => + value.n.toRight(ReadError(s"value ${value.toString()} is not a Number")) + } + + def decodeString: DynamoValue => Res[String] = { value => + value.s.toRight(ReadError(s"value ${value.toString()} is not a String")) + } + + def decodeBytes: DynamoValue => Res[ByteVector] = { value => + value.b.toRight(ReadError(s"value ${value.toString()} is not a ByteVector")) + } + + def decodeBytesSet: DynamoValue => Res[NonEmptySet[ByteVector]] = { value => + value.bs.toRight( + ReadError(s"value ${value.toString()} is not a ByteVector Set") + ) + } + + def decodeNumSet: DynamoValue => Res[NonEmptySet[DynamoValue.Number]] = { + value => + value.ns.toRight( + ReadError(s"value ${value.toString()} is not a Number Set") + ) + } + + def decodeStrSet: DynamoValue => Res[NonEmptySet[String]] = { value => + value.ss.toRight( + ReadError(s"value ${value.toString()} is not a String Set") + ) + } + + def decodeNull: DynamoValue => Res[Unit] = { value => + value.nul.toRight( + ReadError(s"value ${value.toString()} is not a Null") + ) + } + + def decodeSequence[V]( + schema: Schema[V], + value: DynamoValue + ): Res[List[V]] = { + value.l match { + case None => + Left(ReadError(s"value ${value.toString()} is not a Sequence")) + case Some(xs) => + val lb = List.newBuilder[V] + + @tailrec + def loop( + xs: List[DynamoValue], + result: Builder[V, List[V]] + ): Either[ReadError, Builder[V, List[V]]] = xs match { + case head :: next => + schema.read(head) match { + case Right(value) => + loop(next, result += value) + case Left(error) => + Left(error) + } + case Nil => + Right(result) + } + + loop(xs, lb).map(_.result()) + } + } + + def decodeDictionary[V]( + schema: Schema[V], + value: DynamoValue + ): Res[Map[String, V]] = + value.m + .toRight(ReadError(s"value ${value.toString()} is not a Dictionary")) + .flatMap( + _.map { case (k, v) => k -> v } + .traverse(schema.read) + ) + + def decodeSum[A](cases: 
Chain[Alt[A]]): DynamoValue => Res[A] = { + + type Decode = DynamoValue => Either[List[ReadError], A] + + val baseDecode: Decode = (_: DynamoValue) => + Either.left[List[ReadError], A](List.empty[ReadError]) + + cases + .foldLeft[Decode](baseDecode) { (acc, alt) => + acc.flatMap { + case Left(value) => + (v) => + alt.caseSchema + .read(v) + .map(alt.prism.inject) + .leftMap(e => e :: value) + case ok: Right[List[ReadError], A] => _ => ok + } + } + .andThen { res => + res.leftMap { errors => + val errorsDetails = errors.map(_.message).mkString("[", ",", "]") + ReadError( + s"value doesn't match any of the alternatives: $errorsDetails" + ) + } + } + } + + def decodeIsos[V](xmap: XMap[V], v: DynamoValue): Res[V] = + xmap.schema + .read(v) + .flatMap(xmap.r) + + def decodeRecord[R](record: Record[R]): DynamoValue => Either[ReadError, R] = + value => + value.m + .toRight(ReadError(s"value ${value.toString()} is not a Dictionary")) + .flatMap { map => + val decodedValues = new Array[Any](record.fields.length) + var i = 0 + var error: ReadError = null + + while (i < record.fields.length && error == null) { + record.fields(i) match { + case Field.Optional(name, schema, get) => + map.get(name) match { + case None => decodedValues(i) = None + case Some(value) => + schema.read(value) match { + case Left(err) => error = err + case Right(v) => decodedValues(i) = Some(v) + } + } + + case Field.Required(name, schema, get) => + map.get(name) match { + case None => + error = ReadError( + s"required field $name does not contain a value" + ) + case Some(value) => + schema.read(value) match { + case Left(err) => error = err + case Right(v) => decodedValues(i) = v + } + } + + } + i += 1 + } + + if (error != null) Left(error) + else Right(record.build(decodedValues.toList)) + } + +} diff --git a/modules/core/jvm/src/main/scala/internal/encoding.scala b/modules/core/jvm/src/main/scala/internal/encoding.scala new file mode 100644 index 0000000..45fe959 --- /dev/null +++ b/modules/core/jvm/src/main/scala/internal/encoding.scala @@ -0,0 +1,146 @@ +/* + * Copyright 2020 Fabio Labella + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dynosaur +package internal + +import cats.{~>, Monoid, MonoidK} +import cats.syntax.all._ +import alleycats.std.map._ +import cats.data.Chain +import cats.free.FreeApplicative + +import scodec.bits.ByteVector + +import Schema.WriteError +import Schema.structure._ + +object encoding { + def fromSchema[A](s: Schema[A]): A => Either[WriteError, DynamoValue] = + s match { + case Identity => (_: DynamoValue).asRight + case Num => encodeNum + case Str => encodeString + case Bool => encodeBool + case Bytes => encodeBytes + case BytesSet => encodeBytesSet + case NumSet => encodeNumSet + case StrSet => encodeStrSet + case Nul => encodeNull + case Sequence(elem) => encodeSequence(elem, _) + case Dictionary(elem) => encodeDictionary(elem, _) + case r: Record[A] => encodeRecord(r) + case Sum(cases) => encodeSum(cases) + case Isos(iso) => encodeIsos(iso, _) + case Defer(schema) => schema().write + } + + type Res = Either[WriteError, DynamoValue] + + def encodeBool: Boolean => Res = DynamoValue.bool(_).asRight + + def encodeNum: DynamoValue.Number => Res = DynamoValue.n(_).asRight + + def encodeString: String => Res = DynamoValue.s(_).asRight + + def encodeBytes: ByteVector => Res = DynamoValue.b(_).asRight + + def encodeBytesSet: NonEmptySet[ByteVector] => Res = + DynamoValue.bs(_).asRight + def encodeNumSet: NonEmptySet[DynamoValue.Number] => Res = + DynamoValue.ns(_).asRight + def encodeStrSet: NonEmptySet[String] => Res = + DynamoValue.ss(_).asRight + + def encodeNull: Unit => Res = _ => DynamoValue.nul.asRight + + def encodeSequence[V](schema: Schema[V], value: List[V]) = + value.traverse(schema.write).map(DynamoValue.l) + + def encodeDictionary[V](schema: Schema[V], value: Map[String, V]) = + value + .map { case (k, v) => k -> v } + .traverse(schema.write) + .map(DynamoValue.m) + + def encodeSum[C](cases: Chain[Alt[C]]): C => Res = { + implicit def orElse[T]: Monoid[Option[T]] = + MonoidK[Option].algebra + + cases + .foldMap { alt => (coproduct: C) => + alt.prism.tryGet(coproduct).map { elem => + alt.caseSchema.write(elem) + } + } + .andThen( + _.getOrElse( + WriteError( + "Alternative not specified for all possible subtypes" + ).asLeft + ) + ) + } + + def encodeIsos[V](xmap: XMap[V], value: V): Res = + xmap.w(value).flatMap(v => xmap.schema.write(v)) + + def encodeRecord[R]( + record: Record[R] + ): R => Either[WriteError, DynamoValue] = { + val fieldCount = record.fields.length + + { value => + { + var i = 0 + var error: WriteError = null + + val map = + new java.util.IdentityHashMap[String, AttributeValue](fieldCount) + + while (i < record.fields.length && error == null) { + record.fields(i) match { + case Field.Required(name, schema, get) => + schema.write(get(value)) match { + case Left(err) => error = err + case Right(v) => map.put(name, v.value) + } + case Field.Optional(name, schema, get) => + val fieldValue = get(value) + if (fieldValue != None) { + schema.write(fieldValue.get) match { + case Left(err) => error = err + case Right(v) => map.put(name, v.value) + } + } + } + + i += 1 + } + + if (error != null) + Left(error) + else + Right( + DynamoValue( + software.amazon.awssdk.services.dynamodb.model.AttributeValue + .fromM(map) + ) + ) + } + } + } +} diff --git a/modules/core/shared/src/main/scala/Schema.scala b/modules/core/shared/src/main/scala/Schema.scala index 9e0d8f2..35f8d42 100644 --- a/modules/core/shared/src/main/scala/Schema.scala +++ b/modules/core/shared/src/main/scala/Schema.scala @@ -16,6 +16,7 @@ package dynosaur +import cats._ import cats.syntax.all._ 
import cats.free.FreeApplicative import cats.data.Chain @@ -35,20 +36,17 @@ sealed trait Schema[A] { self => import structure._ private var read_ : DynamoValue => Either[ReadError, A] = null - private var write_ : A => Either[WriteError, DynamoValue] = null def read(v: DynamoValue): Either[ReadError, A] = { if (read_ == null) read_ = internal.decoding.fromSchema(this) - read_(v) } def write(a: A): Either[WriteError, DynamoValue] = { if (write_ == null) write_ = internal.encoding.fromSchema(this) - write_(a) } @@ -173,12 +171,60 @@ object Schema { def nullable[A](implicit s: Schema[A]): Schema[Option[A]] = s.nullable - def fields[R](p: FreeApplicative[Field[R, *], R]): Schema[R] = Record(p) + case class OptimizedRecord[R, A]( + fields: List[Field[R, _]], + build: List[Any] => A + ) + + def fields[R](p: FreeApplicative[Field[R, *], R]): Schema[R] = { + val optimized = optimizeRecord(p) + Record(optimized.fields, optimized.build) + } + def record[R]( b: FieldBuilder[R] => FreeApplicative[Field[R, *], R] ): Schema[R] = fields(b(field)) + private def optimizeRecord[R, A]( + fa: FreeApplicative[Field[R, *], A] + ): OptimizedRecord[R, A] = { + implicit val optimizedRecordApplicative = + new cats.Applicative[λ[α => OptimizedRecord[R, α]]] { + def pure[X](x: X): OptimizedRecord[R, X] = + OptimizedRecord(Nil, _ => x) + + def ap[X, Y]( + ff: OptimizedRecord[R, X => Y] + )(fa: OptimizedRecord[R, X]): OptimizedRecord[R, Y] = + OptimizedRecord( + ff.fields ++ fa.fields, + values => { + val (f, x) = values.splitAt(ff.fields.length) match { + case (ffValues, faValues) => + (ff.build(ffValues), fa.build(faValues)) + } + f(x) + } + ) + } + + fa.foldMap(new (Field[R, *] ~> λ[α => OptimizedRecord[R, α]]) { + def apply[X](field: Field[R, X]): OptimizedRecord[R, X] = field match { + case f @ Field.Required(name, schema, get) => + OptimizedRecord( + List(f), + values => values.head.asInstanceOf[X] + ) + case f @ Field.Optional(name, schema, get) => + OptimizedRecord( + List(f.asInstanceOf[Field[R, _]]), + values => values.head.asInstanceOf[X] + ) + } + }) + } + def alternatives[A](cases: Chain[Alt[A]]): Schema[A] = Sum(cases) def oneOf[A](b: AltBuilder[A] => Chain[Alt[A]]): Schema[A] = @@ -246,13 +292,31 @@ object Schema { case object StrSet extends Schema[NonEmptySet[String]] case class Dictionary[A](value: Schema[A]) extends Schema[Map[String, A]] case class Sequence[A](value: Schema[A]) extends Schema[List[A]] - case class Record[R](value: FreeApplicative[Field[R, *], R]) - extends Schema[R] + + case class Record[R]( + fields: List[Field[R, ?]], + build: List[Any] => R, + fieldNames: Array[String] + ) extends Schema[R] + + object Record { + def apply[R]( + fields: List[Field[R, ?]], + build: List[Any] => R + ): Record[R] = { + val fieldNames = fields.map(_.name).toArray + new Record(fields, build, fieldNames) + } + } + case class Sum[A](value: Chain[Alt[A]]) extends Schema[A] case class Isos[A](value: XMap[A]) extends Schema[A] case class Defer[A](value: () => Schema[A]) extends Schema[A] - sealed trait Field[R, E] + sealed trait Field[R, E] { + def name: String + } + object Field { case class Required[R, E]( name: String, @@ -281,7 +345,6 @@ object Schema { } } - // TODO use parseFromString from Numeric, 2.13+ private def num[A: Numeric](convert: String => A): Schema[A] = Num.imapErr { v => Either diff --git a/modules/core/shared/src/test/scala/SchemaSuite.scala b/modules/core/shared/src/test/scala/SchemaSuite.scala index 0802aa0..4286a72 100644 --- a/modules/core/shared/src/test/scala/SchemaSuite.scala +++ 
b/modules/core/shared/src/test/scala/SchemaSuite.scala @@ -85,19 +85,20 @@ class SchemaSuite extends ScalaCheckSuite { case class Paragraph(text: String) extends Text case class Section(title: String, contents: List[Text]) extends Text - def check[A](schema: Schema[A], data: A, expected: V) = { + def check[A](schema: Schema[A], data: A, expected: V)(implicit + loc: munit.Location + ) = { val output = schema .write(data) - .toOption - assertEquals(output, expected.some, clue(data)) + assertEquals(output, expected.asRight, clue(data)) val roundTrip = output.flatMap { output => - schema.read(output).toOption + schema.read(output) } - assertEquals(roundTrip, data.some) + assertEquals(roundTrip, data.asRight) } def checkNotFail[A](schema: Schema[A], data: A) = { @@ -162,6 +163,33 @@ class SchemaSuite extends ScalaCheckSuite { loop(deep, Paragraph("dolor sit amet")) } + def deepDepartment(depth: Int, width: Int = 1): Department = { + if (depth <= 0) { + Department(s"Leaf", Nil) + } else { + Department( + s"Level-$depth", + List.fill(width)(deepDepartment(depth - 1, width)) + ) + } + } + + def deepDepartmentDynamoValue(depth: Int, width: Int = 1): DynamoValue = { + if (depth <= 0) { + DynamoValue.m( + "name" -> DynamoValue.s("Leaf"), + "subdeps" -> DynamoValue.l(List.empty) + ) + } else { + DynamoValue.m( + "name" -> DynamoValue.s(s"Level-$depth"), + "subdeps" -> DynamoValue.l( + List.fill(width)(deepDepartmentDynamoValue(depth - 1, width)) + ) + ) + } + } + test("id") { forAllNoShrink { (dv: V) => val expected = dv @@ -305,10 +333,7 @@ class SchemaSuite extends ScalaCheckSuite { check(versionedSchema, user, expected) } - test("products with optional fields") { - val complete = Log("complete log", "tag".some) - val noTag = Log("incomplete log", None) - + test("products with optional fields should handle presence") { val schema = Schema.record[Log] { field => ( field("msg", _.msg), @@ -316,31 +341,55 @@ class SchemaSuite extends ScalaCheckSuite { ).mapN(Log.apply) } - val expectedComplete = V.m( + val complete = Log("complete log", "tag".some) + + val expected = V.m( "msg" -> V.s(complete.msg), "tag" -> V.s(complete.tag.get) ) - val expectedNoTag = V.m( + check(schema, complete, expected) + } + + test("products with optional fields should handle absence") { + + val schema = Schema.record[Log] { field => + ( + field("msg", _.msg), + field.opt("tag", _.tag) + ).mapN(Log.apply) + } + + val noTag = Log("incomplete log", None) + + val expected = V.m( "msg" -> V.s(noTag.msg) ) + check(schema, noTag, expected) + } + + test("products with optional fields should handle incorrect") { + + val schema = Schema.record[Log] { field => + ( + field("msg", _.msg), + field.opt("tag", _.tag) + ).mapN(Log.apply) + } + val incorrectNoTag = V.m( - "msg" -> V.s(noTag.msg), + "msg" -> V.s("incomplete log"), "tag" -> V.nul ) - check(schema, complete, expectedComplete) - check(schema, noTag, expectedNoTag) assertEquals( schema.read(incorrectNoTag), Left(Schema.ReadError("value \"NULL\": true is not a String")) ) } - test("products with nullable values") { - val complete = Log("complete log", "tag".some) - val noTag = Log("incomplete log", None) + test("products with nullable values should handle presence") { val schema = Schema.record[Log] { field => ( @@ -349,22 +398,47 @@ class SchemaSuite extends ScalaCheckSuite { ).mapN(Log.apply) } - val expectedComplete = V.m( + val complete = Log("complete log", "tag".some) + + val expected = V.m( "msg" -> V.s(complete.msg), "tag" -> V.s(complete.tag.get) ) - val expectedNoTag = V.m( 
+ check(schema, complete, expected) + } + + test("products with nullable values should handle absence") { + + val schema = Schema.record[Log] { field => + ( + field("msg", _.msg), + field("tag", _.tag)(Schema.nullable) + ).mapN(Log.apply) + } + + val noTag = Log("incomplete log", None) + + val expected = V.m( "msg" -> V.s(noTag.msg), "tag" -> V.nul ) + check(schema, noTag, expected) + } + + test("products with nullable values should handle incorrect") { + val schema = Schema.record[Log] { field => + ( + field("msg", _.msg), + field("tag", _.tag)(Schema.nullable) + ).mapN(Log.apply) + } + val incorrectNoTag = V.m( - "msg" -> V.s(noTag.msg) + "msg" -> V.s("incomplete log") ) - check(schema, complete, expectedComplete) - check(schema, noTag, expectedNoTag) assertEquals( schema.read(incorrectNoTag), Left(Schema.ReadError("required field tag does not contain a value")) @@ -541,6 +615,38 @@ class SchemaSuite extends ScalaCheckSuite { check(schema, departments, expected) } + test("recursive products with very deep record") { + val departments = deepDepartment(100, 1) + val expected = deepDepartmentDynamoValue(100, 1); + + val schema: Schema[Department] = Schema.recursive { rec => + Schema.record { field => + ( + field("name", _.name), + field("subdeps", _.subdeps)(rec.asList) + ).mapN(Department.apply) + } + } + + check(schema, departments, expected) + } + + test("recursive products with very wide record") { + val departments = deepDepartment(1, 100) + val expected = deepDepartmentDynamoValue(1, 100); + + val schema: Schema[Department] = Schema.recursive { rec => + Schema.record { field => + ( + field("name", _.name), + field("subdeps", _.subdeps)(rec.asList) + ).mapN(Department.apply) + } + } + + check(schema, departments, expected) + } + test("products with more than 22 fields") { val big = Big( "f", diff --git a/project/plugins.sbt b/project/plugins.sbt index 8f57d75..9c45ede 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.codecommit" % "sbt-spiewak-sonatype" % "0.23.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.4.0") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.14.0") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.6.2") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.17.0") addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.6") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.7")
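
Usage sketch (not part of the patch, appended for illustration): the record changes above replace the FreeApplicative interpretation in decoding.scala/encoding.scala with a flattened Record(fields, build) representation that is walked with a plain while loop. The public API exercised in SchemaSuite is unchanged, so a record schema is still declared and used as below; the Log case class and field names mirror the test code and are illustrative only.

    import cats.syntax.all._
    import dynosaur.Schema

    case class Log(msg: String, tag: Option[String])

    // Declared exactly as in SchemaSuite; Schema.record now builds the
    // flattened Record(fields, build) via optimizeRecord instead of keeping
    // the raw FreeApplicative around for every read/write.
    val logSchema: Schema[Log] = Schema.record[Log] { field =>
      (
        field("msg", _.msg),     // required attribute
        field.opt("tag", _.tag)  // optional attribute, omitted when None
      ).mapN(Log.apply)
    }

    // Encoding walks record.fields in order and writes straight into a
    // platform map (java.util map on the JVM, js.Dictionary on Scala.js).
    val complete = Log("complete log", Some("tag"))
    val written  = logSchema.write(complete) // Right(map with "msg" and "tag")

    // Decoding fills one slot per field and rebuilds the value via record.build.
    val roundTrip = written.toOption.map(logSchema.read) // Some(Right(complete))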