diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 63d8415c..e1f89f4f 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -28,7 +28,7 @@ jobs: uses: playframework/.github/.github/workflows/cmd.yml@v2 with: java: 11, 8 - scala: 2.11.12, 2.12.16, 2.13.8 + scala: 2.11.12, 2.12.16, 2.13.8 # TODO: 3.1.3 (other play modules not ok) cmd: sbt ++$MATRIX_SCALA docs/test tests: @@ -40,8 +40,8 @@ jobs: uses: playframework/.github/.github/workflows/cmd.yml@v2 with: java: 11, 8 - scala: 2.11.12, 2.12.16, 2.13.8 - cmd: sbt ++$MATRIX_SCALA publishLocal test # scapegoat + scala: 2.11.12, 2.12.16, 2.13.8, 3.1.3 + cmd: sbt ++$MATRIX_SCALA publishLocal test finish: name: Finish diff --git a/.scalafix.conf b/.scalafix.conf index c37b3ef1..55722207 100644 --- a/.scalafix.conf +++ b/.scalafix.conf @@ -12,7 +12,7 @@ OrganizeImports { "re:javax?\\.", "scala.language", "scala.util", "scala.collection", "scala.", - "akka.", + "akka.actor", "akka.stream", "akka.", "play.", "resource.", "acolyte.", diff --git a/.scalafmt.conf b/.scalafmt.conf index 84b504fa..3f9b9b92 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,8 @@ -runner.dialect = scala213 +version = 3.5.8 + +runner.dialect = Scala213Source3 +project.layout = StandardConvention + align.preset = true assumeStandardLibraryStripMargin = true danglingParentheses.preset = true @@ -10,5 +14,10 @@ rewrite.rules = [ AvoidInfix, RedundantParens, SortModifiers, PreferCurlyFors ] rewrite.sortModifiers.order = [ "private", "protected", "final", "sealed", "abstract", "implicit", "override", "lazy" ] spaces.inImportCurlyBraces = true # more idiomatic to include whitespace in import x.{ yyy } trailingCommas = preserve + newlines.afterCurlyLambda = preserve -version = 3.5.9 +newlines.beforeCurlyLambdaParams = multilineWithCaseOnly + +literals.long=Upper +literals.float=Upper +literals.double=Upper \ No newline at end of file diff --git a/akka/src/test/scala/anorm/AkkaStreamSpec.scala b/akka/src/test/scala/anorm/AkkaStreamSpec.scala index ed77f777..ae806279 100644 --- a/akka/src/test/scala/anorm/AkkaStreamSpec.scala +++ b/akka/src/test/scala/anorm/AkkaStreamSpec.scala @@ -7,21 +7,26 @@ import scala.collection.immutable.Seq import scala.concurrent.Future import scala.concurrent.duration._ +import akka.actor.ActorSystem + +import akka.stream.Materializer import akka.stream.scaladsl.{ Keep, Sink, Source } -import acolyte.jdbc.QueryResult import acolyte.jdbc.AcolyteDSL.withQueryResult import acolyte.jdbc.Implicits._ +import acolyte.jdbc.QueryResult import acolyte.jdbc.RowLists.stringList import org.specs2.concurrent.ExecutionEnv final class AkkaStreamSpec(implicit ee: ExecutionEnv) extends org.specs2.mutable.Specification { - "Akka Stream" title + "Akka Stream".title + + implicit lazy val system: ActorSystem = ActorSystem("anorm-tests") - implicit lazy val system = akka.actor.ActorSystem("knox-core-tests") - implicit def materializer = akka.stream.ActorMaterializer.create(system) + implicit def materializer: Materializer = + akka.stream.ActorMaterializer.create(system) // Akka-Contrib issue with Akka-Stream > 2.5.4 // import akka.stream.contrib.TestKit.assertAllStagesStopped @@ -66,8 +71,9 @@ final class AkkaStreamSpec(implicit ee: ExecutionEnv) extends org.specs2.mutable "on success" in assertAllStagesStopped { withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit con => - runAsync(Sink.seq[String]) must beLike[ResultSet] { case rs => - (rs.isClosed must beTrue).and(rs.getStatement.isClosed
must beTrue).and(con.isClosed must beFalse) + runAsync(Sink.seq[String]) must beLike[ResultSet] { + case rs => + (rs.isClosed must beTrue).and(rs.getStatement.isClosed must beTrue).and(con.isClosed must beFalse) }.await(0, 5.seconds) } } diff --git a/build.sbt b/build.sbt index e2b329b3..dd55f275 100644 --- a/build.sbt +++ b/build.sbt @@ -4,25 +4,24 @@ import Common._ import com.typesafe.tools.mima.core._ import com.typesafe.tools.mima.plugin.MimaKeys.{ mimaBinaryIssueFilters, mimaPreviousArtifacts } -/* TODO // Scalafix inThisBuild( List( - //scalaVersion := "2.13.3", + // scalaVersion := "2.13.3", semanticdbEnabled := true, semanticdbVersion := scalafixSemanticdb.revision, - scalafixDependencies ++= Seq( - "com.github.liancheng" %% "organize-imports" % "0.5.0") + scalafixDependencies ++= Seq("com.github.liancheng" %% "organize-imports" % "0.5.0") ) ) - */ val specs2Test = Seq( "specs2-core", - "specs2-junit" -).map("org.specs2" %% _ % "4.10.6" % Test) + "specs2-junit", + "specs2-matcher-extra" +).map("org.specs2" %% _ % "4.10.6" % Test cross (CrossVersion.for3Use2_13)) + .map(_.exclude("org.scala-lang.modules", "*")) -lazy val acolyteVersion = "1.1.4" +lazy val acolyteVersion = "1.2.1" lazy val acolyte = "org.eu.acolyte" %% "jdbc-scala" % acolyteVersion % Test ThisBuild / resolvers ++= Seq("Tatami Snapshots".at("https://raw.github.com/cchantep/tatami/master/snapshots")) @@ -33,22 +32,26 @@ lazy val `anorm-tokenizer` = project .in(file("tokenizer")) .settings( mimaPreviousArtifacts := { - if (scalaBinaryVersion.value == "2.13") { + if (scalaBinaryVersion.value == "3") { Set.empty } else { mimaPreviousArtifacts.value } }, - libraryDependencies ++= Seq( - "org.scala-lang" % "scala-reflect" % scalaVersion.value - ) + libraryDependencies += { + if (scalaBinaryVersion.value == "3") { + "org.scala-lang" %% "scala3-compiler" % scalaVersion.value % Provided + } else { + "org.scala-lang" % "scala-reflect" % scalaVersion.value + } + } ) // --- val armShading = Seq( - libraryDependencies += "com.jsuereth" %% "scala-arm" % "2.1-SNAPSHOT", - assembly / test := {}, + libraryDependencies += ("com.jsuereth" %% "scala-arm" % "2.1-SNAPSHOT").cross(CrossVersion.for3Use2_13), + assembly / test := {}, assembly / assemblyOption ~= { _.withIncludeScala(false) // java libraries shouldn't include scala }, @@ -96,6 +99,23 @@ lazy val parserCombinatorsVer = Def.setting[String] { } } +lazy val coreMimaFilter: ProblemFilter = { + case MissingClassProblem(old) => + !old.fullName.startsWith("resource.") && + old.fullName.indexOf("Macro") == -1 && + !old.fullName.startsWith("anorm.macros.") + + case _ => true +} + +lazy val xmlVer = Def.setting[String] { + if (scalaBinaryVersion.value == "2.11") { + "1.3.0" + } else { + "2.1.0" + } +} + lazy val `anorm-core` = project .in(file("core")) .settings( @@ -104,11 +124,27 @@ lazy val `anorm-core` = project (Compile / sourceGenerators) += Def.task { Seq(GFA((Compile / sourceManaged).value / "anorm")) }.taskValue, - scalacOptions ++= Seq( - "-Xlog-free-terms", - "-P:silencer:globalFilters=missing\\ in\\ object\\ ToSql\\ is\\ deprecated;possibilities\\ in\\ class\\ ColumnNotFound\\ is\\ deprecated;DeprecatedSqlParser\\ in\\ package\\ anorm\\ is\\ deprecated;constructor\\ deprecatedName\\ in\\ class\\ deprecatedName\\ is\\ deprecated" - ), - (Test / scalacOptions) ++= { + scaladocExtractorSkipToken := { + if (scalaBinaryVersion.value == "3") { + "// skip-doc-5f98a5e" + } else { + scaladocExtractorSkipToken.value + } + }, + scalacOptions ++= { + if (scalaBinaryVersion.value == "3") 
{ + Seq( + "-Wconf:cat=deprecation&msg=.*(reflectiveSelectableFromLangReflectiveCalls|DeprecatedSqlParser|missing .*ToSql).*:s" + ) + } else { + Seq( + "-Xlog-free-terms", + "-P:silencer:globalFilters=missing\\ in\\ object\\ ToSql\\ is\\ deprecated;possibilities\\ in\\ class\\ ColumnNotFound\\ is\\ deprecated;DeprecatedSqlParser\\ in\\ package\\ anorm\\ is\\ deprecated;constructor\\ deprecatedName\\ in\\ class\\ deprecatedName\\ is\\ deprecated" + ) + } + }, + Test / scalacOptions ++= { if (scalaBinaryVersion.value == "2.13") { Seq("-Ypatmat-exhaust-depth", "off", "-P:silencer:globalFilters=multiarg\\ infix\\ syntax") } else { @@ -116,7 +152,7 @@ lazy val `anorm-core` = project } }, mimaPreviousArtifacts := { - if (scalaBinaryVersion.value == "2.13") { + if (scalaBinaryVersion.value == "3") { Set.empty } else { mimaPreviousArtifacts.value @@ -149,29 +185,25 @@ lazy val `anorm-core` = project ProblemFilters.exclude[DirectMissingMethodProblem]( // deprecated 2.3.8 "anorm.SqlQuery.statement" ), + incoRet("anorm.ParameterValue.apply"), // private: - ProblemFilters.exclude[MissingClassProblem]( // macro - "anorm.Macro$ImplicitResolver$2$ImplicitTransformer$" - ), - ProblemFilters.exclude[MissingClassProblem]( // macro - "anorm.Macro$ImplicitResolver$2$Implicit$" - ), - ProblemFilters.exclude[MissingClassProblem]( // macro - "anorm.Macro$ImplicitResolver$2$Implicit" - ), ProblemFilters.exclude[DirectMissingMethodProblem]( // private "anorm.Sql.asTry" ), - ProblemFilters.exclude[ReversedMissingMethodProblem]("anorm.JavaTimeToStatement.localDateToStatement") + ProblemFilters.exclude[ReversedMissingMethodProblem]("anorm.JavaTimeToStatement.localDateToStatement"), + coreMimaFilter, + // was deprecated + ProblemFilters.exclude[IncompatibleMethTypeProblem]("anorm.ColumnNotFound.copy"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("anorm.ColumnNotFound.copy$default$2") ), libraryDependencies ++= Seq( "joda-time" % "joda-time" % "2.11.1", "org.joda" % "joda-convert" % "2.2.2", "org.scala-lang.modules" %% "scala-parser-combinators" % parserCombinatorsVer.value, - "com.h2database" % "h2" % "2.1.214" % Test, - acolyte, - "com.chuusai" %% "shapeless" % "2.3.9" % Test - ) ++ specs2Test + "org.scala-lang.modules" %% "scala-xml" % xmlVer.value % Test, - "com.h2database" % "h2" % "2.1.214" % Test, + "com.h2database" % "h2" % "2.1.214" % Test, acolyte + ) ++ specs2Test, ) ++ armShading ) .dependsOn(`anorm-tokenizer`) lazy val `anorm-iteratee` = (project in file("iteratee")) .settings( sourceDirectory := { - if (scalaBinaryVersion.value == "2.13") new java.io.File("/no/sources") + val v = scalaBinaryVersion.value + + if (v == "3" || v == "2.13") new java.io.File("/no/sources") else sourceDirectory.value }, mimaPreviousArtifacts := { - if (scalaBinaryVersion.value == "2.13") Set.empty[ModuleID] - else Set(organization.value %% name.value % "2.6.0") + val v = scalaBinaryVersion.value + + if (v == "3" || v == "2.13") Set.empty[ModuleID] + else Set(organization.value %% name.value % "2.6.10") + }, + publish / skip := { + val v = scalaBinaryVersion.value + + v == "3" || v == "2.13" }, - publish / skip := { scalaBinaryVersion.value == "2.13" }, libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.13") Seq.empty[ModuleID] + val v = scalaBinaryVersion.value + + if (v == "3" || v == "2.13") Seq.empty[ModuleID] else Seq( - "com.typesafe.play" %% "play-iteratees" % "2.6.1", + "com.typesafe.play" %% "play-iteratees" % "2.6.1", + "org.scala-lang.modules" %% "scala-xml" % xmlVer.value
% Test, acolyte ) ++ specs2Test } @@ -202,7 +245,10 @@ lazy val `anorm-iteratee` = (project in file("iteratee")) lazy val akkaVer = Def.setting[String] { sys.env.get("AKKA_VERSION").getOrElse { - if (scalaBinaryVersion.value == "2.11") "2.4.20" + val v = scalaBinaryVersion.value + + if (v == "2.11") "2.4.20" + else if (v == "3") "2.6.19" else "2.5.32" } } @@ -214,14 +260,31 @@ val akkaContribVer = Def.setting[String] { lazy val `anorm-akka` = (project in file("akka")) .settings( - mimaPreviousArtifacts := Set.empty, + mimaPreviousArtifacts := { + if (scalaBinaryVersion.value == "3") { + Set.empty + } else { + mimaPreviousArtifacts.value + } + }, libraryDependencies ++= Seq("akka-testkit", "akka-stream").map { m => - "com.typesafe.akka" %% m % akkaVer.value % Provided + ("com.typesafe.akka" %% m % akkaVer.value % Provided).exclude("org.scala-lang.modules", "*") }, - libraryDependencies ++= (acolyte +: specs2Test) ++ Seq( - "com.typesafe.akka" %% "akka-stream-contrib" % akkaContribVer.value % Test + libraryDependencies ++= Seq( + acolyte, + "org.scala-lang.modules" %% "scala-xml" % xmlVer.value % Test + ) ++ specs2Test ++ Seq( + ("com.typesafe.akka" %% "akka-stream-contrib" % akkaContribVer.value % Test) + .cross(CrossVersion.for3Use2_13) + .exclude("com.typesafe.akka", "*") ), - scalacOptions += "-P:silencer:globalFilters=deprecated", + scalacOptions ++= { + if (scalaBinaryVersion.value == "3") { + Seq("-Wconf:cat=deprecation&msg=.*(onDownstreamFinish|ActorMaterializer).*:s") + } else { + Seq("-P:silencer:globalFilters=deprecated") + } + }, Test / unmanagedSourceDirectories ++= { CrossVersion.partialVersion(scalaVersion.value) match { case Some((2, n)) if n < 13 => @@ -240,7 +303,7 @@ lazy val `anorm-akka` = (project in file("akka")) lazy val pgVer = sys.env.get("POSTGRES_VERSION").getOrElse("42.5.0") val playVer = Def.setting[String] { - if (scalaVersion.value.startsWith("2.13")) "2.7.3" + if (scalaBinaryVersion.value == "2.13") "2.7.3" else "2.6.14" } @@ -248,14 +311,18 @@ lazy val `anorm-postgres` = (project in file("postgres")) .settings( mimaPreviousArtifacts := Set.empty, libraryDependencies ++= { + val v = scalaBinaryVersion.value + val playJsonVer = { - if (scalaBinaryVersion.value == "2.13") "2.9.2" + if (v == "2.13") "2.9.2" + else if (v == "3") "2.10.0-RC6" else "2.6.7" } Seq( - "org.postgresql" % "postgresql" % pgVer, - "com.typesafe.play" %% "play-json" % playJsonVer + "org.postgresql" % "postgresql" % pgVer, + "com.typesafe.play" %% "play-json" % playJsonVer, + "org.scala-lang.modules" %% "scala-xml" % xmlVer.value % Test ) ++ specs2Test :+ acolyte } ) @@ -263,8 +330,23 @@ lazy val `anorm-postgres` = (project in file("postgres")) lazy val `anorm-enumeratum` = (project in file("enumeratum")) .settings( + sourceDirectory := { + if (scalaBinaryVersion.value == "3") new java.io.File("/no/sources") + else sourceDirectory.value + }, + publish / skip := { scalaBinaryVersion.value == "3" }, mimaPreviousArtifacts := Set.empty, - libraryDependencies ++= Seq("com.beachape" %% "enumeratum" % "1.7.0", acolyte) ++ specs2Test + libraryDependencies ++= { + if (scalaBinaryVersion.value != "3") { + Seq( + "org.scala-lang.modules" %% "scala-xml" % xmlVer.value % Test, + "com.beachape" %% "enumeratum" % "1.7.0", + acolyte + ) ++ specs2Test + } else { + Seq.empty + } + } ) .dependsOn(`anorm-core`) @@ -294,14 +376,13 @@ lazy val docs = project ) .dependsOn(`anorm-core`) -Scapegoat.settings - ThisBuild / playBuildRepoName := "anorm" addCommandAlias( "validateCode", List( + "scalafixAll -check", 
"scalafmtSbtCheck", - "scalafmtCheckAll", + "+scalafmtCheckAll" ).mkString(";") ) diff --git a/core/src/main/scala/anorm/Macro.scala b/core/src/main/scala-2/anorm/Macro.scala similarity index 79% rename from core/src/main/scala/anorm/Macro.scala rename to core/src/main/scala-2/anorm/Macro.scala index 89ba7261..1fd95b8e 100644 --- a/core/src/main/scala/anorm/Macro.scala +++ b/core/src/main/scala-2/anorm/Macro.scala @@ -18,116 +18,17 @@ import com.github.ghik.silencer.silent * @define projectionParam The optional projection for the properties as parameters; If none, using the all the class properties. * @define valueClassTParam the type of the value class */ -object Macro { +object Macro extends MacroOptions { import scala.language.experimental.macros import scala.reflect.macros.whitebox - /** Only for internal purposes */ - final class Placeholder {} - - /** Only for internal purposes */ - object Placeholder { - implicit object Parser extends RowParser[Placeholder] { - val success = Success(new Placeholder()) - - def apply(row: Row) = success - } - } - - /** - * Naming strategy, to map each class property to the corresponding column. - */ - trait ColumnNaming extends (String => String) { - - /** - * Returns the column name for the class property. - * - * @param property the name of the case class property - */ - def apply(property: String): String - } - - /** Naming companion */ - object ColumnNaming { - - /** Keep the original property name. */ - object Identity extends ColumnNaming { - def apply(property: String) = property - } - - /** - * For each class property, use the snake case equivalent - * to name its column (e.g. fooBar -> foo_bar). - */ - object SnakeCase extends ColumnNaming { - private val re = "[A-Z]+".r - - def apply(property: String): String = - re.replaceAllIn(property, { m => s"_${m.matched.toLowerCase}" }) - } - - /** Naming using a custom transformation function. */ - def apply(transformation: String => String): ColumnNaming = - new ColumnNaming { - def apply(property: String): String = transformation(property) - } - } - - trait Discriminate extends (String => String) { - - /** - * Returns the value representing the specified type, - * to be used as a discriminator within a sealed family. - * - * @param tname the name of type (class or object) to be discriminated - */ - def apply(tname: String): String - } - - object Discriminate { - sealed class Function(f: String => String) extends Discriminate { - def apply(tname: String) = f(tname) - } - - /** Uses the type name as-is as value for the discriminator */ - object Identity extends Function(identity[String]) - - /** Returns a `Discriminate` function from any `String => String`. */ - def apply(discriminate: String => String): Discriminate = - new Function(discriminate) - } - - trait DiscriminatorNaming extends (String => String) { - - /** - * Returns the name for the discriminator column. - * @param familyType the name of the famility type (sealed trait) - */ - def apply(familyType: String): String - } - - object DiscriminatorNaming { - sealed class Function(f: String => String) extends DiscriminatorNaming { - def apply(familyType: String) = f(familyType) - } - - /** Always use "classname" as name for the discriminator column. */ - object Default extends Function(_ => "classname") - - /** Returns a naming according from any `String => String`. 
*/ - def apply(naming: String => String): DiscriminatorNaming = - new Function(naming) - } - - // --- - - def namedParserImpl[T: c.WeakTypeTag](c: whitebox.Context): c.Expr[T] = { + def namedParserImpl[T: c.WeakTypeTag](c: whitebox.Context): c.Expr[RowParser[T]] = { import c.universe._ parserImpl[T](c) { (t, n, _) => q"anorm.SqlParser.get[$t]($n)" } } - def namedParserImpl1[T: c.WeakTypeTag](c: whitebox.Context)(naming: c.Expr[ColumnNaming]): c.Expr[T] = { + def namedParserImpl1[T: c.WeakTypeTag](c: whitebox.Context)(naming: c.Expr[ColumnNaming]): c.Expr[RowParser[T]] = { import c.universe._ parserImpl[T](c) { (t, n, _) => q"anorm.SqlParser.get[$t]($naming($n))" } @@ -135,10 +36,10 @@ object Macro { @deprecated("Use [[namedParserImpl2]]", "2.5.2") @SuppressWarnings(Array("MethodNames" /*deprecated*/ )) - def namedParserImpl_[T: c.WeakTypeTag](c: whitebox.Context)(names: c.Expr[String]*): c.Expr[T] = + def namedParserImpl_[T: c.WeakTypeTag](c: whitebox.Context)(names: c.Expr[String]*): c.Expr[RowParser[T]] = namedParserImpl2[T](c)(names: _*) - def namedParserImpl2[T: c.WeakTypeTag](c: whitebox.Context)(names: c.Expr[String]*): c.Expr[T] = { + def namedParserImpl2[T: c.WeakTypeTag](c: whitebox.Context)(names: c.Expr[String]*): c.Expr[RowParser[T]] = { import c.universe._ namedParserImpl4[T](c)(names) { n => q"$n" } @@ -146,7 +47,7 @@ object Macro { def namedParserImpl3[T: c.WeakTypeTag]( c: whitebox.Context - )(naming: c.Expr[ColumnNaming], names: c.Expr[String]*): c.Expr[T] = { + )(naming: c.Expr[ColumnNaming], names: c.Expr[String]*): c.Expr[RowParser[T]] = { import c.universe._ namedParserImpl4[T](c)(names) { n => q"$naming($n)" } @@ -154,7 +55,7 @@ object Macro { private def namedParserImpl4[T: c.WeakTypeTag]( c: whitebox.Context - )(names: Seq[c.Expr[String]])(naming: c.Expr[String] => c.universe.Tree): c.Expr[T] = { + )(names: Seq[c.Expr[String]])(naming: c.Expr[String] => c.universe.Tree): c.Expr[RowParser[T]] = { import c.universe._ val tpe = c.weakTypeTag[T].tpe @@ -165,7 +66,10 @@ object Macro { def psz = params.size if (names.size < psz) { - c.abort(c.enclosingPosition, s"no column name for parameters: ${show(names)} < $params") + c.abort( + c.enclosingPosition, + s"no column name for parameters: ${names.map(n => show(n)).mkString(", ")} < ${params.map(_.name).mkString(", ")}" + ) } else { parserImpl[T](c) { (t, _, i) => @@ -181,7 +85,7 @@ object Macro { } } - def offsetParserImpl[T: c.WeakTypeTag](c: whitebox.Context)(offset: c.Expr[Int]): c.Expr[T] = { + def offsetParserImpl[T: c.WeakTypeTag](c: whitebox.Context)(offset: c.Expr[Int]): c.Expr[RowParser[T]] = { import c.universe._ parserImpl[T](c) { (t, _, i) => @@ -189,7 +93,7 @@ object Macro { } } - def indexedParserImpl[T: c.WeakTypeTag](c: whitebox.Context): c.Expr[T] = { + def indexedParserImpl[T: c.WeakTypeTag](c: whitebox.Context): c.Expr[RowParser[T]] = { import c.universe._ @silent def p = reify(0) @@ -221,7 +125,7 @@ object Macro { private def parserImpl[T: c.WeakTypeTag](c: whitebox.Context)( genGet: (c.universe.Type, String, Int) => c.universe.Tree - ): c.Expr[T] = anorm.macros.RowParserImpl[T](c)(genGet) + ): c.Expr[RowParser[T]] = anorm.macros.RowParserImpl[T](c)(genGet) /** * Returns a row parser generated for a case class `T`, @@ -501,16 +405,16 @@ object Macro { // --- - /** - * @param propertyName the name of the class property - * @param parameterName the name of for the parameter, - * if different from the property one, otherwise `None` - */ - case class ParameterProjection(propertyName: String, parameterName: 
Option[String] = None) + /** Only for internal purposes */ + final class Placeholder {} - object ParameterProjection { - def apply(propertyName: String, parameterName: String): ParameterProjection = - ParameterProjection(propertyName, Option(parameterName)) + /** Only for internal purposes */ + object Placeholder { + implicit object Parser extends RowParser[Placeholder] { + val success = Success(new Placeholder()) + + def apply(row: Row) = success + } } private[anorm] lazy val debugEnabled = diff --git a/core/src/main/scala-2/anorm/PackageCompat.scala b/core/src/main/scala-2/anorm/PackageCompat.scala new file mode 100644 index 00000000..17094eb4 --- /dev/null +++ b/core/src/main/scala-2/anorm/PackageCompat.scala @@ -0,0 +1,9 @@ +package anorm + +import scala.language.implicitConversions + +private[anorm] trait PackageCompat { + // TODO: Review implicit usage there + // (add explicit functions on SqlQuery?) + implicit def sqlToSimple(sql: SqlQuery): SimpleSql[Row] = sql.asSimple +} diff --git a/core/src/main/scala/anorm/macros/Implicit.scala b/core/src/main/scala-2/anorm/macros/Implicit.scala similarity index 100% rename from core/src/main/scala/anorm/macros/Implicit.scala rename to core/src/main/scala-2/anorm/macros/Implicit.scala diff --git a/core/src/main/scala/anorm/macros/ImplicitResolver.scala b/core/src/main/scala-2/anorm/macros/ImplicitResolver.scala similarity index 100% rename from core/src/main/scala/anorm/macros/ImplicitResolver.scala rename to core/src/main/scala-2/anorm/macros/ImplicitResolver.scala diff --git a/core/src/main/scala/anorm/macros/Inspect.scala b/core/src/main/scala-2/anorm/macros/Inspect.scala similarity index 97% rename from core/src/main/scala/anorm/macros/Inspect.scala rename to core/src/main/scala-2/anorm/macros/Inspect.scala index 6c04b5bb..b37fb59c 100644 --- a/core/src/main/scala/anorm/macros/Inspect.scala +++ b/core/src/main/scala-2/anorm/macros/Inspect.scala @@ -70,8 +70,9 @@ private[anorm] object Inspect { if (apply.paramLists.isEmpty) { Map.empty } else { - Compat.toMap(Compat.lazyZip(apply.typeParams, tpeArgs)) { case (sym, ty) => - sym.fullName -> ty + Compat.toMap(Compat.lazyZip(apply.typeParams, tpeArgs)) { + case (sym, ty) => + sym.fullName -> ty } } } diff --git a/core/src/main/scala/anorm/macros/RowParserImpl.scala b/core/src/main/scala-2/anorm/macros/RowParserImpl.scala similarity index 98% rename from core/src/main/scala/anorm/macros/RowParserImpl.scala rename to core/src/main/scala-2/anorm/macros/RowParserImpl.scala index f98679b5..e3bf4619 100644 --- a/core/src/main/scala/anorm/macros/RowParserImpl.scala +++ b/core/src/main/scala-2/anorm/macros/RowParserImpl.scala @@ -9,7 +9,7 @@ import anorm.macros.Inspect.pretty private[anorm] object RowParserImpl { def apply[T: c.WeakTypeTag]( c: whitebox.Context - )(genGet: (c.universe.Type, String, Int) => c.universe.Tree): c.Expr[T] = { + )(genGet: (c.universe.Type, String, Int) => c.universe.Tree): c.Expr[RowParser[T]] = { val tpe = c.weakTypeTag[T].tpe @inline def abort(msg: String) = c.abort(c.enclosingPosition, msg) @@ -141,6 +141,6 @@ private[anorm] object RowParserImpl { c.echo(c.enclosingPosition, s"row parser generated for $tpe: ${pretty(c)(parser)}") } - c.Expr(c.typecheck(parser)) + c.Expr[RowParser[T]](c.typecheck(parser)) } } diff --git a/core/src/main/scala/anorm/macros/SealedRowParserImpl.scala b/core/src/main/scala-2/anorm/macros/SealedRowParserImpl.scala similarity index 95% rename from core/src/main/scala/anorm/macros/SealedRowParserImpl.scala rename to 
core/src/main/scala-2/anorm/macros/SealedRowParserImpl.scala index fb5f4d31..eaaae489 100644 --- a/core/src/main/scala/anorm/macros/SealedRowParserImpl.scala +++ b/core/src/main/scala-2/anorm/macros/SealedRowParserImpl.scala @@ -57,12 +57,8 @@ private[anorm] object SealedRowParserImpl { val caseName = TermName(c.freshName("discriminated")) val key = q"$discriminate(${subclass.typeSymbol.fullName})" val caseDecl = q"val $caseName = $key" - val subtype = { - if (subclass.typeSymbol.asClass.typeParams.isEmpty) subclass - else subclass.erasure - } - (key, caseDecl, cq"`$caseName` => implicitly[anorm.RowParser[$subtype]]") + (key, caseDecl, cq"`$caseName` => implicitly[anorm.RowParser[$subclass]]") } lazy val supported = q"List(..${cases.map(_._1)})" diff --git a/core/src/main/scala/anorm/macros/ToParameterListImpl.scala b/core/src/main/scala-2/anorm/macros/ToParameterListImpl.scala similarity index 91% rename from core/src/main/scala/anorm/macros/ToParameterListImpl.scala rename to core/src/main/scala-2/anorm/macros/ToParameterListImpl.scala index 90823139..ba92f526 100644 --- a/core/src/main/scala/anorm/macros/ToParameterListImpl.scala +++ b/core/src/main/scala-2/anorm/macros/ToParameterListImpl.scala @@ -21,9 +21,20 @@ private[anorm] object ToParameterListImpl { import c.universe._ + val toParamLstTpe = c.weakTypeTag[ToParameterList[_]].tpe + val cases = subclasses.map { subcls => - cq"v: ${subcls} => implicitly[_root_.anorm.ToParameterList[${subcls}]].apply(v)" + val ptype = appliedType(toParamLstTpe, List(subcls)) + + c.inferImplicitValue(ptype) match { + case EmptyTree => + c.abort(c.enclosingPosition, s"Missing ToParameterList[${subcls}]") + + case toParams => + cq"v: ${subcls} => $toParams(v)" + } } + val arg = TermName(c.freshName("arg")) val mat = Match(q"${arg}", cases) @@ -88,8 +99,9 @@ private[anorm] object ToParameterListImpl { } // All supported class properties - val properties = ctor.paramLists.take(1).flatten.collect { case term: TermSymbol => - term + val properties = ctor.paramLists.take(1).flatten.collect { + case term: TermSymbol => + term } // Among the properties, according the specified projection @@ -215,12 +227,14 @@ private[anorm] object ToParameterListImpl { } } - val appendCalls = effectiveProj.flatMap { case (propName, paramName) => - appendParameters.get(propName).map { case (append, _) => - // Find the previously generated append function for the property, - // and applies it with the parameter name - q"${append}(${paramName})" - } + val appendCalls = effectiveProj.flatMap { + case (propName, paramName) => + appendParameters.get(propName).map { + case (append, _) => + // Find the previously generated append function for the property, + // and applies it with the parameter name + q"${append}(${paramName})" + } } val resultCall = q"${bufName}.result()" diff --git a/core/src/main/scala/anorm/macros/ValueColumnImpl.scala b/core/src/main/scala-2/anorm/macros/ValueColumnImpl.scala similarity index 100% rename from core/src/main/scala/anorm/macros/ValueColumnImpl.scala rename to core/src/main/scala-2/anorm/macros/ValueColumnImpl.scala diff --git a/core/src/main/scala/anorm/macros/ValueToStatementImpl.scala b/core/src/main/scala-2/anorm/macros/ValueToStatementImpl.scala similarity index 100% rename from core/src/main/scala/anorm/macros/ValueToStatementImpl.scala rename to core/src/main/scala-2/anorm/macros/ValueToStatementImpl.scala diff --git a/core/src/main/scala-3/anorm/Macro.scala b/core/src/main/scala-3/anorm/Macro.scala new file mode 100644 index 
00000000..f1d3c904 --- /dev/null +++ b/core/src/main/scala-3/anorm/Macro.scala @@ -0,0 +1,572 @@ +package anorm + +import scala.quoted.{ Expr, FromExpr, Quotes, ToExpr, Type } + +/** + * @define caseTParam the type of case class + * @define namingParam the column naming, to resolve the column name for each case class property + * @define namesParam the names of the columns corresponding to the case class properties + * @define sealedParserDoc Returns a row parser generated + * for a sealed class family. + * Each direct known subclass `C` must be provided with an appropriate + * `RowParser[C]` in the implicit scope. + * + * @define discriminatorNamingParam the naming function for the discriminator column + * @define discriminateParam the discriminating function applied to each name of the family type + * @define familyTParam the type of the type family (either a sealed trait or abstract class) + * @define separatorParam the separator used with nested properties + * @define projectionParam The optional projection for the properties as parameters; if none, all the class properties are used. + * @define valueClassTParam the type of the value class + */ +object Macro extends MacroOptions with macros.ValueColumn with macros.ValueToStatement: + + /** + * Returns a row parser generated for a case class `T`, + * getting column values by name. + * + * @tparam T $caseTParam + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(v: Int) + * + * val p: RowParser[YourCaseClass] = Macro.namedParser[YourCaseClass] + * }}} + */ + inline def namedParser[T]: RowParser[T] = ${ namedParserImpl[T] } + + /** + * Returns a row parser generated for a case class `T`, + * getting column values by name. + * + * @tparam T $caseTParam + * @param naming $namingParam + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(v: Int) + * + * val p: RowParser[YourCaseClass] = + * Macro.namedParser[YourCaseClass](Macro.ColumnNaming.SnakeCase) + * }}} + */ + inline def namedParser[T](inline naming: ColumnNaming): RowParser[T] = + ${ namedParserImpl1[T]('naming) } + + /** + * Returns a row parser generated for a case class `T`, + * getting column values according to the property `names`. + * + * @tparam T $caseTParam + * @param names $namesParam + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(a: Int, b: String) + * + * val p: RowParser[YourCaseClass] = + * Macro.parser[YourCaseClass]("foo", "bar") + * }}} + */ + inline def parser[T](inline names: String*): RowParser[T] = + ${ namedParserImpl3[T]('names) } + + /** + * Returns a row parser generated for a case class `T`, + * getting column values according to the property `names`. + * + * @tparam T $caseTParam + * + * @param naming $namingParam + * @param names $namesParam + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(a: Int, b: String) + * + * val p: RowParser[YourCaseClass] = + * Macro.parser[YourCaseClass](Macro.ColumnNaming.SnakeCase, "foo", "loremIpsum") + * }}} + */ + inline def parser[T](inline naming: ColumnNaming, inline names: String*): RowParser[T] = + ${ namedParserImpl2[T]('naming, 'names) } + + /** + * Returns a row parser generated for a case class `T`, + * getting column values by position.
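+ * Columns are matched by position, following the declaration order
+ * of the case class properties. For instance (a sketch, assuming the
+ * first selected column is an `Int` and the second a `String`):
+ *
+ * {{{
+ * import anorm.{ Macro, RowParser }
+ *
+ * case class TwoColumns(a: Int, b: String)
+ *
+ * // first column parsed as `a`, second as `b`
+ * val two: RowParser[TwoColumns] = Macro.indexedParser[TwoColumns]
+ * }}}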
+ * + * @tparam T $caseTParam + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(v: Int) + * + * val p: RowParser[YourCaseClass] = Macro.indexedParser[YourCaseClass] + * }}} + */ + inline def indexedParser[T]: RowParser[T] = ${ indexedParserImpl[T] } + + /** + * Returns a row parser generated for a case class `T`, + * getting column values by position, with an offset. + * + * @tparam T $caseTParam + * @param offset the offset of column to be considered by the parser + * + * {{{ + * import anorm.{ Macro, RowParser } + * + * case class YourCaseClass(v: Int) + * + * val p: RowParser[YourCaseClass] = Macro.offsetParser[YourCaseClass](2) + * }}} + */ + inline def offsetParser[T](inline offset: Int): RowParser[T] = + ${ offsetParserImpl[T]('offset) } + + /** + * $sealedParserDoc + * The default naming is used. + * + * @tparam T $familyTParam + */ + inline def sealedParser[T]: RowParser[T] = ${ sealedParserImpl1[T] } + + /** + * $sealedParserDoc + * + * @param naming $discriminatorNamingParam + * @tparam T $familyTParam + */ + inline def sealedParser[T](inline naming: DiscriminatorNaming): RowParser[T] = + ${ sealedParserImpl2[T]('naming) } + + /** + * $sealedParserDoc + * + * @param discriminate $discriminateParam + * @tparam T $familyTParam + */ + inline def sealedParser[T](inline discriminate: Discriminate): RowParser[T] = + ${ sealedParserImpl3[T]('discriminate) } + + /** + * $sealedParserDoc + * + * @param naming $discriminatorNamingParam + * @param discriminate $discriminateParam + * @tparam T $familyTParam + */ + inline def sealedParser[T](inline naming: DiscriminatorNaming, discriminate: Discriminate): RowParser[T] = + ${ sealedParserImpl[T]('naming, 'discriminate) } + + // --- + + /** + * Returns a column parser for specified value class. 
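+ * (Note: this assumes a `Column` instance for the wrapped value type
+ * is available in the implicit scope, e.g. the provided `Column[Int]`
+ * for the example below.)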
+ * + * {{{ + * import anorm._ + * + * class ValueClassType(val v: Int) extends AnyVal + * + * implicit val column: Column[ValueClassType] = + * Macro.valueColumn[ValueClassType] + * }}} + * + * @tparam T $valueClassTParam + */ + inline def valueColumn[T <: AnyVal]: Column[T] = + ${ valueColumnImpl[T] } + + // --- + + /** + * @param separator $separatorParam + * @tparam T $caseTParam + * + * {{{ + * import anorm.{ Macro, ToParameterList } + * + * case class Bar(v: Float) + * + * // Bar must be a case class, or a sealed trait with known subclasses + * implicit val toParams: ToParameterList[Bar] = Macro.toParameters[Bar] + * }}} + */ + inline def toParameters[T]: ToParameterList[T] = ${ defaultParameters[T] } + + /** + * @param separator $separatorParam + * @tparam T $caseTParam + * + * {{{ + * import anorm.{ Macro, ToParameterList } + * + * case class Bar(v: String) + * + * // Bar must be a case class + * implicit val toParams: ToParameterList[Bar] = + * Macro.toParameters[Bar]("_") + * }}} + */ + inline def toParameters[T](inline separator: String): ToParameterList[T] = + ${ parametersDefaultNames[T]('separator) } + + /** + * @param projection $projectionParam + * @tparam T $caseTParam + * + * {{{ + * import anorm.{ Macro, ToParameterList } + * + * case class Bar(v: Int) + * + * // Bar must be a case class + * implicit val toParams: ToParameterList[Bar] = + * Macro.toParameters[Bar]() + * }}} + */ + inline def toParameters[T](inline projection: ParameterProjection*): ToParameterList[T] = ${ + configuredParameters[T]('projection) + } + + /** + * @param separator $separatorParam + * @param projection $projectionParam + * @tparam T $caseTParam + */ + inline def toParameters[T](inline separator: String, projection: ParameterProjection*): ToParameterList[T] = ${ + parametersWithSeparator[T]('separator, 'projection) + } + + // --- + + /** + * Returns a `ToStatement` for the specified ValueClass. 
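+ * (Note: likewise, this assumes a `ToStatement` instance for the
+ * wrapped value type is in the implicit scope, e.g. the provided
+ * `ToStatement[Int]` for the example below.)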
+ * + * {{{ + * import anorm._ + * + * class ValueClassType(val i: Int) extends AnyVal + * + * implicit val instance: ToStatement[ValueClassType] = + * Macro.valueToStatement[ValueClassType] + * }}} + * + * @tparam T $valueClassTParam + */ + inline def valueToStatement[T <: AnyVal]: ToStatement[T] = + ${ valueToStatementImpl[T] } + + // --- + + private def namedParserImpl[A](using q: Quotes, tpe: Type[A]): Expr[RowParser[A]] = { + parserImpl[A](q) { + [T] => (_: Type[T]) ?=> (col: Expr[Column[T]], n: String, _: Int) => '{ SqlParser.get[T](${ Expr(n) })($col) } + } + } + + private def namedParserImpl1[A]( + naming: Expr[ColumnNaming] + )(using q: Quotes, tpe: Type[A], colNme: Type[ColumnNaming]): Expr[RowParser[A]] = { + parserImpl[A](q) { + [T] => + (_: Type[T]) ?=> (col: Expr[Column[T]], n: String, _: Int) => '{ SqlParser.get[T]($naming(${ Expr(n) }))($col) } + } + } + + private def namedParserImpl2[T]( + naming: Expr[ColumnNaming], + names: Expr[Seq[String]] + )(using + Quotes, + Type[T], + Type[ColumnNaming] + ): Expr[RowParser[T]] = + namedParserImpl4[T](names) { n => '{ $naming($n) } } + + private def namedParserImpl3[T](names: Expr[Seq[String]])(using + Quotes, + Type[T], + Type[ColumnNaming] + ): Expr[RowParser[T]] = namedParserImpl4[T](names)(identity) + + private def namedParserImpl4[A]( + names: Expr[Seq[String]] + )(naming: Expr[String] => Expr[String])(using q: Quotes, tpe: Type[A]): Expr[RowParser[A]] = { + + import q.reflect.* + + val repr = TypeRepr.of[A](using tpe) + val ctor = repr.typeSymbol.primaryConstructor + + val params = ctor.paramSymss.map(_.filterNot(_.isType)).flatten + + @SuppressWarnings(Array("ListSize")) + def psz = params.size + + val ns = names.valueOrAbort + + if (ns.size < psz) { + report.errorAndAbort(s"no column name for parameters: ${ns.mkString(", ")} < ${params.mkString("[", ", ", "]")}") + + } else { + parserImpl[A](q) { + [T] => + (_: Type[T]) ?=> + (col: Expr[Column[T]], _: String, i: Int) => + ns.lift(i) match { + case Some(n) => { + val cn = naming(Expr(n)) + + '{ SqlParser.get[T]($cn)($col) } + } + + case _ => + report.errorAndAbort(s"missing column name for parameter $i") + } + } + } + } + + private def offsetParserImpl[A](offset: Expr[Int])(using q: Quotes, tpe: Type[A]): Expr[RowParser[A]] = { + parserImpl[A](q) { + [T] => + (_: Type[T]) ?=> + (col: Expr[Column[T]], _: String, i: Int) => '{ SqlParser.get[T]($offset + ${ Expr(i + 1) })($col) } + } + } + + private def indexedParserImpl[T](using Quotes, Type[T]): Expr[RowParser[T]] = + offsetParserImpl[T]('{ 0 }) + + private def sealedParserImpl1[T](using Quotes, Type[T]): Expr[RowParser[T]] = { + def discriminator = '{ Macro.DiscriminatorNaming.Default } + def discriminate = '{ Macro.Discriminate.Identity } + + sealedParserImpl(discriminator, discriminate) + } + + private def sealedParserImpl2[T](naming: Expr[DiscriminatorNaming])(using Quotes, Type[T]): Expr[RowParser[T]] = + sealedParserImpl(naming, '{ Macro.Discriminate.Identity }) + + private def sealedParserImpl3[T](discriminate: Expr[Discriminate])(using Quotes, Type[T]): Expr[RowParser[T]] = + sealedParserImpl('{ Macro.DiscriminatorNaming.Default }, discriminate) + + private def sealedParserImpl[T](naming: Expr[DiscriminatorNaming], discriminate: Expr[Discriminate])(using + Quotes, + Type[T] + ): Expr[RowParser[T]] = + macros.SealedRowParserImpl[T](naming, discriminate) + + inline private def withParser[T](f: RowParser[T] => (Row => SqlResult[T])): RowParser[T] = new RowParser[T] { self => + lazy val underlying = f(self) + + def 
apply(row: Row): SqlResult[T] = underlying(row) + } + + /** + * @tparam T the field type + */ + private[anorm] type RowParserGenerator = + [T] => (fieldType: Type[T]) ?=> (column: Expr[Column[T]], fieldName: String, fieldIndex: Int) => Expr[RowParser[T]] + + /** + * @tparam A the case class type + * @param genGet the function applied to each field of case class `A` + */ + private def parserImpl[A](q: Quotes)(genGet: RowParserGenerator)(using Type[A]): Expr[RowParser[A]] = { + given quotes: Quotes = q + + '{ + withParser[A] { self => + ${ macros.RowParserImpl[A](q, 'self)(genGet) } + } + } + } + + // --- + + private[anorm] given parameterProjectionFromExpr: FromExpr[ParameterProjection] = new FromExpr[ParameterProjection] { + def unapply(expr: Expr[ParameterProjection])(using q: Quotes): Option[ParameterProjection] = { + import q.reflect.* + + val strTpr = TypeRepr.of[String] + + @annotation.tailrec + def rec(term: Term): Option[ParameterProjection] = term match { + case Block(stats, e) => + if stats.isEmpty then rec(e) else None + + case Inlined(_, bindings, e) => + if bindings.isEmpty then rec(e) else None + + case Typed(e, _) => + rec(e) + + case Apply(meth, propNme :: paramNme :: Nil) + if meth.symbol.fullName.endsWith(f"ParameterProjection$$.apply") => { + val strFrom = FromExpr.StringFromExpr[String] + + for { + propertyName <- strFrom.unapply(propNme.asExprOf[String]) + parameterName <- { + if (paramNme.tpe <:< strTpr) { + strFrom.unapply(paramNme.asExprOf[String]).map(Option(_)) + } else { + FromExpr.OptionFromExpr[String].unapply(paramNme.asExprOf[Option[String]]) + } + } + } yield ParameterProjection(propertyName, parameterName) + } + + case _ => + None + } + + rec(expr.asTerm) + } + } + + private[anorm] given parameterProjectionToExpr: ToExpr[ParameterProjection] = + new ToExpr[ParameterProjection] { + def apply(p: ParameterProjection)(using q: Quotes): Expr[ParameterProjection] = { + import q.reflect.* + + val propertyName = Expr(p.propertyName) + val parameterName = Expr(p.parameterName) + + '{ Macro.ParameterProjection($propertyName, $parameterName) } + } + } + + private def parametersDefaultNames[T](separator: Expr[String])(using + q: Quotes, + tpe: Type[T], + proj: Type[ParameterProjection] + ): Expr[ToParameterList[T]] = { + import q.reflect.* + + '{ + withSelfToParameterList[T] { selfRef => + ${ + macros.ToParameterListImpl.caseClass[T]( + forwardExpr = 'selfRef, + projection = Expr(Seq.empty[ParameterProjection]), + separator = separator + ) + } + } + } + } + + private def configuredParameters[T]( + projection: Expr[Seq[ParameterProjection]] + )(using q: Quotes, tpe: Type[T], proj: Type[ParameterProjection]): Expr[ToParameterList[T]] = { + import q.reflect.* + + '{ + withSelfToParameterList[T] { selfRef => + ${ macros.ToParameterListImpl.caseClass[T]('selfRef, projection, '{ "_" }) } + } + } + } + + private def parametersWithSeparator[T]( + separator: Expr[String], + projection: Expr[Seq[ParameterProjection]] + )(using q: Quotes, tpe: Type[T]): Expr[ToParameterList[T]] = { + import q.reflect.* + + '{ + withSelfToParameterList[T] { selfRef => + ${ macros.ToParameterListImpl.caseClass[T]('selfRef, projection, separator) } + } + } + } + + private def defaultParameters[T](using + q: Quotes, + tpe: Type[T], + proj: Type[ParameterProjection] + ): Expr[ToParameterList[T]] = { + import q.reflect.* + + val repr = TypeRepr.of[T](using tpe) + val tpeSym = repr.typeSymbol + val flags = tpeSym.flags + + if (flags.is(Flags.Sealed) || flags.is(Flags.Abstract)) { + 
macros.ToParameterListImpl.sealedTrait[T] + } else if (!tpeSym.isClassDef || !flags.is(Flags.Case)) { + report.errorAndAbort(s"Either a sealed trait or a case class expected: $tpe") + + } else { + '{ + withSelfToParameterList[T] { selfRef => + ${ + macros.ToParameterListImpl.caseClass[T]( + forwardExpr = 'selfRef, + projection = Expr(Seq.empty[ParameterProjection]), + separator = '{ "_" } + ) + } + } + } + } + } + + // --- + + private def withColumn[T]( + f: Expr[Column[T]] => Expr[RowParser[T]] + )(using q: Quotes, tpe: Type[T]): Expr[RowParser[T]] = { + import q.reflect.* + + Expr.summon[Column[T]] match { + case Some(col) => + f(col) + + case _ => { + val repr = TypeRepr.of[T](using tpe) + + report.errorAndAbort(s"Missing Column[${repr.show}]") + } + } + } + + inline private def withSelfToParameterList[T]( + f: ToParameterList[T] => (T => List[NamedParameter]) + ): ToParameterList[T] = new ToParameterList[T] { self => + lazy val underlying = f(self) + + def apply(input: T): List[NamedParameter] = underlying(input) + } + + /** Only for internal purposes */ + final class Placeholder {} + + /** Only for internal purposes */ + object Placeholder { + implicit object Parser extends RowParser[Placeholder] { + val success = Success(new Placeholder()) + + def apply(row: Row) = success + } + } + + private[anorm] lazy val debugEnabled = + Option(System.getProperty("anorm.macro.debug")) + .filterNot(_.isEmpty) + .map(_.toLowerCase) + .map { v => + "true".equals(v) || v.substring(0, 1) == "y" + } + .getOrElse(false) + +end Macro diff --git a/core/src/main/scala-3/anorm/PackageCompat.scala b/core/src/main/scala-3/anorm/PackageCompat.scala new file mode 100644 index 00000000..4bbf3f2a --- /dev/null +++ b/core/src/main/scala-3/anorm/PackageCompat.scala @@ -0,0 +1,203 @@ +package anorm + +import java.sql.Connection + +import scala.util.Try + +private[anorm] trait PackageCompat: + + extension (query: SqlQuery) + /** + * Returns the query prepared with named parameters. + * + * {{{ + * import anorm._ + * + * val baseSql = SQL("SELECT * FROM table WHERE id = {id}") // one named param + * val preparedSql = baseSql.on("id" -> "value") + * }}} + */ + def on(args: NamedParameter*): SimpleSql[Row] = + query.asSimple.on(args: _*) + + /** + * Returns the query prepared with parameters using initial order + * of placeholder in statement. + * + * {{{ + * import anorm._ + * + * val baseSql = + * SQL("SELECT * FROM table WHERE name = {name} AND lang = {lang}") + * + * val preparedSql = baseSql.onParams("1st", "2nd") + * // 1st param = name, 2nd param = lang + * }}} + */ + def onParams(args: ParameterValue*): SimpleSql[Row] = { + val simple = query.asSimple + + simple.copy(params = simple.params ++ Sql.zipParams(simple.sql.paramsInitialOrder, args, Map.empty)) + } + + /** + * Returns the query prepared with the named parameters, + * provided by the appropriate `converter`. + * + * @param value the value to be converted as list of [[NamedParameter]] + * @param converter the function used to convert the `value` + * @tparam U the type of the value + */ + def bind[U](value: U)(using converter: ToParameterList[U]): SimpleSql[Row] = + query.asSimple.bind[U](value) + + def map[T](f: Row => T): SimpleSql[T] = { + val simple = query.asSimple + + simple.copy(defaultParser = simple.defaultParser.map(f)) + } + + /** Returns a copy with updated timeout. 
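+ *
+ * For instance (a sketch, reusing the `Test` table from the
+ * surrounding examples), to limit a query to 10 seconds:
+ *
+ * {{{
+ * import anorm._
+ *
+ * def timed = SQL("SELECT * FROM Test").withQueryTimeout(Some(10))
+ * }}}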
*/ + def withQueryTimeout(seconds: Option[Int]): SimpleSql[Row] = { + val simple = query.asSimple + + simple.copy(sql = simple.sql.withQueryTimeout(seconds)) + } + + /** Returns a copy with updated flag. */ + def withResultSetOnFirstRow(onFirst: Boolean): SimpleSql[Row] = + query.asSimple.copy(resultSetOnFirstRow = onFirst) + + /** Fetch size */ + def fetchSize: Option[Int] = None + + /** + * Returns this query with the fetch size updated to the row `count`. + * @see [[SqlQuery.fetchSize]] + */ + def withFetchSize(count: Option[Int]): SimpleSql[Row] = { + val simple = query.asSimple + + simple.copy(simple.sql.withFetchSize(count)) + } + + /** + * Aggregates over all rows using the specified operator. + * + * @param z the start value + * @param aliaser the column aliaser + * @param op Aggregate operator + * @return Either list of failures at left, or aggregated value + * @see #foldWhile + * @see #withResult + */ + def fold[T](z: => T, aliaser: ColumnAliaser)( + op: (T, Row) => T + )(using connection: Connection): Either[List[Throwable], T] = + query.asSimple.fold[T](z, aliaser)(op)(connection) + + /** + * Aggregates over part of or the whole row stream, + * using the specified operator. + * + * @param z the start value + * @param aliaser the column aliaser + * @param op Aggregate operator. Returns aggregated value along with true if aggregation must process next value, or false to stop with current value. + * @return Either list of failures at left, or aggregated value + * @see #withResult + */ + def foldWhile[T](z: => T, aliaser: ColumnAliaser)( + op: (T, Row) => (T, Boolean) + )(using connection: Connection): Either[List[Throwable], T] = + query.asSimple.foldWhile[T](z, aliaser)(op)(connection) + + /** + * Processes all or some rows from current result. + * + * @param op Operation applied with row cursor + * @param aliaser the column aliaser + * + * {{{ + * import java.sql.Connection + * import anorm._ + * + * @annotation.tailrec + * def go(c: Option[Cursor], l: List[Row]): List[Row] = c match { + * case Some(cursor) => go(cursor.next, l :+ cursor.row) + * case _ => l + * } + * + * def l(using con: Connection): Either[List[Throwable], List[Row]] = + * SQL"SELECT * FROM Test".withResult(go(_, List.empty)) + * }}} + */ + def withResult[T](op: Option[Cursor] => T)(using connection: Connection): Either[List[Throwable], T] = + withResult[T](op, ColumnAliaser.empty) + + /** + * Processes all or some rows from current result. + * + * @param op Operation applied with row cursor + * + * {{{ + * import java.sql.Connection + * import anorm._ + * + * @annotation.tailrec + * def go(c: Option[Cursor], l: List[Row]): List[Row] = c match { + * case Some(cursor) => go(cursor.next, l :+ cursor.row) + * case _ => l + * } + * + * def l(implicit con: Connection): Either[List[Throwable], List[Row]] = + * SQL"SELECT * FROM Test".withResult(go(_, List.empty)) + * }}} + */ + def withResult[T](op: Option[Cursor] => T, aliaser: ColumnAliaser)(implicit + connection: Connection + ): Either[List[Throwable], T] = + query.asSimple.withResult[T](op, aliaser)(connection) + + /** + * Converts this query result as `T`, using parser. + * + * @param parser the result parser + * @see #asTry + */ + def as[T](parser: ResultSetParser[T])(implicit connection: Connection): T = + query.asSimple.asTry[T](parser, ColumnAliaser.empty).get + + /** + * Converts this query result as `T`, using parser.
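+ * Unlike `as`, failures are reported through `scala.util.Failure`
+ * rather than being thrown. For instance (a sketch, reusing the
+ * `Test` table from the surrounding examples):
+ *
+ * {{{
+ * import scala.util.Try
+ * import anorm._
+ *
+ * def count(implicit con: java.sql.Connection): Try[Long] =
+ *   SQL("SELECT COUNT(*) FROM Test").asTry(SqlParser.scalar[Long].single)
+ * }}}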
+ * + * @param parser the result parser + * @param aliaser the column aliaser + */ + def asTry[T](parser: ResultSetParser[T], aliaser: ColumnAliaser = ColumnAliaser.empty)(implicit + connection: Connection + ): Try[T] = query.asSimple.asTry[T](parser, aliaser)(connection) + + /** + * Executes this SQL statement. + * @return true if resultset was returned from execution + * (statement is query), or false if it executed update. + * + * {{{ + * import anorm._ + * + * def res(implicit con: java.sql.Connection): Boolean = + * SQL"""INSERT INTO Test(a, b) VALUES(\\${"A"}, \\${"B"}""".execute() + * }}} + */ + def execute()(using connection: Connection): Boolean = + query.asSimple.execute() + + /** + * Executes this SQL as an update statement. + * @return Count of updated row(s) + */ + @throws[java.sql.SQLException]("If statement is query not update") + def executeUpdate()(using connection: Connection): Int = + query.asSimple.executeUpdate() + +end PackageCompat diff --git a/core/src/main/scala-3/anorm/macros/ImplicitResolver.scala b/core/src/main/scala-3/anorm/macros/ImplicitResolver.scala new file mode 100644 index 00000000..dfe3e5c4 --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/ImplicitResolver.scala @@ -0,0 +1,275 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.Macro.Placeholder + +private[macros] trait ImplicitResolver[A, Q <: Quotes] { + protected val quotes: Q + + import quotes.reflect.* + + // format: off + private given q: Q = quotes + // format: on + + protected val aTpeRepr: TypeRepr + + // The placeholder type + protected final lazy val PlaceholderType: TypeRepr = + TypeRepr.of[Placeholder] + + /** + * Refactor the input types, by replacing any type matching the `filter`, + * by the given `replacement`. + */ + @annotation.tailrec + private def refactor( + in: List[TypeRepr], + base: (TypeRepr, /*Type*/ Symbol), + out: List[TypeRepr], + tail: List[ + (List[TypeRepr], (TypeRepr, /*Type*/ Symbol), List[TypeRepr]) + ], + filter: TypeRepr => Boolean, + replacement: TypeRepr, + altered: Boolean + ): (TypeRepr, Boolean) = in match { + case tpe :: ts => + tpe match { + case t if filter(t) => + refactor( + ts, + base, + replacement :: out, + tail, + filter, + replacement, + true + ) + + case AppliedType(t, as) if as.nonEmpty => + refactor( + as, + t -> t.typeSymbol, + List.empty, + (ts, base, out) :: tail, + filter, + replacement, + altered + ) + + case t => + refactor( + ts, + base, + t :: out, + tail, + filter, + replacement, + altered + ) + } + + case _ => { + val tpe = base._1.appliedTo(out.reverse) + + tail match { + case (x, y, more) :: ts => + refactor( + x, + y, + tpe :: more, + ts, + filter, + replacement, + altered + ) + + case _ => tpe -> altered + } + } + } + + /** + * Replaces any reference to the type itself by the Placeholder type. + * @return the normalized type + whether any self reference has been found + */ + private def normalized(tpe: TypeRepr): (TypeRepr, Boolean) = + tpe match { + case t if t =:= aTpeRepr => PlaceholderType -> true + + case AppliedType(t, args) if args.nonEmpty => + refactor( + args, + t -> t.typeSymbol, + List.empty, + List.empty, + _ =:= aTpeRepr, + PlaceholderType, + false + ) + + case t => t -> false + } + + /* Restores reference to the type itself when Placeholder is found. 
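+ * (This is the inverse of `normalized` above; together they let the
+ * generated code refer back to the derived type itself without
+ * divergent implicit expansion.)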
*/ + private def denormalized(ptype: TypeRepr): TypeRepr = ptype match { + case t if t =:= PlaceholderType => + aTpeRepr + + case AppliedType(base, args) if args.nonEmpty => + refactor( + args, + base -> base.typeSymbol, + List.empty, + List.empty, + _ == PlaceholderType, + aTpeRepr, + false + )._1 + + case _ => ptype + } + + private val PlaceholderHandlerName = + "reactivemongo.api.bson.Macros.Placeholder.Handler" + + /** + * @param tc the type representation of the typeclass + * @param forwardExpr the `Expr` that forward to the materialized instance itself + */ + private class ImplicitTransformer[T](forwardExpr: Expr[T]) extends TreeMap { + private val denorm = denormalized _ + + @SuppressWarnings(Array("AsInstanceOf")) + override def transformTree(tree: Tree)(owner: Symbol): Tree = tree match { + case TypeApply(tpt, args) => + TypeApply( + transformTree(tpt)(owner).asInstanceOf[Term], + args.map(transformTree(_)(owner).asInstanceOf[TypeTree]) + ) + + case t @ (Select(_, _) | Ident(_)) if t.show == PlaceholderHandlerName => + forwardExpr.asTerm + + case tt: TypeTree => + super.transformTree( + TypeTree.of(using denorm(tt.tpe).asType) + )(owner) + + case Apply(fun, args) => + Apply( + transformTree(fun)(owner).asInstanceOf[Term], + args.map(transformTree(_)(owner).asInstanceOf[Term]) + ) + + case _ => + super.transformTree(tree)(owner) + } + } + + /** + * @param pending a map of type to `Expr[M[_]]` (as term) + */ + private def createImplicit[M[_]]( + pending: Map[TypeRepr, Term], + debug: String => Unit + )(tc: Type[M], ptype: TypeRepr, tx: TreeMap): Option[Implicit] = { + val pt = ptype.asType + val (ntpe, selfRef) = normalized(ptype) + val ptpe = ntpe + + // infers given + val neededGivenType = TypeRepr.of[M](using tc).appliedTo(ptpe) + + val neededGiven: Option[Term] = + pending + .get(ptpe) + .orElse(Implicits.search(neededGivenType) match { + case suc: ImplicitSearchSuccess => { + if (!selfRef) { + Some(suc.tree) + } else { + tx.transformTree(suc.tree)(suc.tree.symbol) match { + case t: Term => Some(t) + case _ => Option.empty[Term] + } + } + } + + case _ => + Option.empty[Term] + }) + + debug { + val show: Option[String] = + try { + neededGiven.map(_.show) + } catch { + case e: MatchError /* Dotty bug */ => + neededGiven.map(_.symbol.fullName) + } + + s"// Resolve given ${prettyType(TypeRepr.of(using tc))} for ${prettyType(ntpe)} as ${prettyType( + neededGivenType + )} (self? 
${selfRef}) = ${show.mkString}" + } + + neededGiven.map(_ -> selfRef) + } + + /** + * @param pending a map of type to `Expr[M[_]]` (as term) + */ + protected[macros] def resolver[M[_], T]( + forwardExpr: Expr[M[T]], + pending: Map[TypeRepr, Term], + debug: String => Unit + )(tc: Type[M]): TypeRepr => Option[Implicit] = { + val tx = new ImplicitTransformer[M[T]](forwardExpr) + + createImplicit(pending, debug)(tc, _: TypeRepr, tx) + } + + private def fullName(sym: Symbol): String = + sym.fullName + .replaceAll("(\\.package\\$|\\$|java\\.lang\\.|scala\\.Predef\\$\\.)", "") + + // To print the implicit types in the compiler messages + protected final def prettyType(t: TypeRepr): String = t match { + case _ if t <:< TypeRepr.of[EmptyTuple] => + "EmptyTuple" + + case AppliedType(ty, a :: b :: Nil) if ty <:< TypeRepr.of[*:] => + s"${prettyType(a)} *: ${prettyType(b)}" + + case AppliedType(_, args) => + fullName(t.typeSymbol) + args.map(prettyType).mkString("[", ", ", "]") + + case OrType(a, b) => + s"${prettyType(a)} | ${prettyType(b)}" + + case _ => { + val sym = t.typeSymbol + + if (sym.isTypeParam) { + sym.name + } else { + fullName(sym) + } + } + } + + type Implicit = (Term, Boolean) +} + +private[macros] object ImplicitResolver { + def apply[T](q: Quotes)(using tpe: Type[T]): ImplicitResolver[T, q.type] = + new ImplicitResolver[T, q.type] { + import q.reflect.* + + val quotes = q + val aTpeRepr = TypeRepr.of[T](using tpe) + } +} diff --git a/core/src/main/scala-3/anorm/macros/Inspect.scala b/core/src/main/scala-3/anorm/macros/Inspect.scala new file mode 100644 index 00000000..8e23deaa --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/Inspect.scala @@ -0,0 +1,90 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.Compat + +private[anorm] object Inspect: + + /** + * Recursively find the sub-classes of `tpr`. + * + * Non case class and generic classes are ignored. + * + * Sub-abstract types are not listed, but their own sub-types are examined; + * e.g. for trait `Foo` + * + * {{{ + * sealed trait Foo + * case class Bar(name: String) extends Foo + * sealed trait SubFoo extends Foo + * case class Lorem() extends SubFoo + * }}} + * + * Class `Lorem` is listed through `SubFoo`, + * but `SubFoo` itself is not returned. 
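+ *
+ * For the `Foo` family above, the returned list would thus contain
+ * the representations of `Bar` and `Lorem` only.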
+ */ + final def knownSubclasses( + q: Quotes + )(tpr: q.reflect.TypeRepr)(using Type[AnyVal]): Option[List[q.reflect.TypeRepr]] = { + import q.reflect.* + + tpr.classSymbol.flatMap { cls => + val anyValTpe: TypeRepr = TypeRepr.of[AnyVal] + + @annotation.tailrec + def subclasses( + children: List[Tree], + out: List[TypeRepr] + ): List[TypeRepr] = { + val childTpr = children.headOption.collect { + case tpd: Typed => + tpd.tpt.tpe + + case vd: ValDef => + vd.tpt.tpe + + case cd: ClassDef => + cd.constructor.returnTpt.tpe + + } + + childTpr match { + case Some(generic @ AppliedType(_, args)) if args.nonEmpty => { + report.warning(s"cannot handle class ${generic.show}: type parameter not supported") + + subclasses(children.tail, out) + } + + case Some(child) => { + val tpeSym = child.typeSymbol + val flags = tpeSym.flags + + if ( + (flags.is(Flags.Abstract) && flags.is(Flags.Sealed) && + !(child <:< anyValTpe)) || + (flags.is(Flags.Sealed) && flags.is(Flags.Trait)) + ) { + // Ignore sub-trait itself, but check the sub-sub-classes + subclasses(tpeSym.children.map(_.tree) ::: children.tail, out) + } else if (!flags.is(Flags.Case)) { + report.warning(s"cannot handle class ${child.show}: no case accessor") + + subclasses(children.tail, out) + } else { + subclasses(children.tail, child :: out) + } + } + + case _ => + out.reverse + } + } + + val types = subclasses(cls.children.map(_.tree), Nil) + + if (types.isEmpty) None else Some(types) + } + } + +end Inspect diff --git a/core/src/main/scala-3/anorm/macros/RowParserImpl.scala b/core/src/main/scala-3/anorm/macros/RowParserImpl.scala new file mode 100644 index 00000000..9be3422c --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/RowParserImpl.scala @@ -0,0 +1,335 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.{ Column, Row, RowParser, SqlResult, ~ } +import anorm.Macro.{ RowParserGenerator, debugEnabled } + +private[anorm] object RowParserImpl { + def apply[A]( + q: Quotes, + forwardExpr: Expr[RowParser[A]] + )(genGet: RowParserGenerator)(using tpe: Type[A], parserTpe: Type[RowParser]): Expr[Row => SqlResult[A]] = { + given quotes: Quotes = q + + import q.reflect.* + + val (repr, erased, aTArgs) = TypeRepr.of[A](using tpe) match { + case tpr @ AppliedType(e, args) => + Tuple3( + tpr, + e, + args.collect { + case repr: TypeRepr => + repr + } + ) + + case tpr => + Tuple3(tpr, tpr, List.empty[TypeRepr]) + } + + @inline def abort(msg: String) = report.errorAndAbort(msg) + + val tpeSym = repr.typeSymbol + + if (!tpeSym.isClassDef || !tpeSym.flags.is(Flags.Case)) { + abort(s"case class expected: ${repr.show}") + } + + // --- + + val ctor = tpeSym.primaryConstructor + + val (boundTypes, properties) = ctor.paramSymss match { + case targs :: paramss if targs.forall(_.isType) && paramss.headOption.exists(_.nonEmpty) => { + val boundTps = targs.zip(aTArgs).toMap + + boundTps -> paramss + } + + case params :: Nil if !params.exists(_.isType) => + Map.empty[Symbol, TypeRepr] -> List(params) + + case _ => + report.errorAndAbort(s"${repr.show} constructor has no parameter") + } + + if (properties.isEmpty) { + abort(s"parsed data cannot be passed as parameter: $ctor") + } + + val debug = { + if (debugEnabled) report.info(_: String) + else (_: String) => {} + } + + val resolv = ImplicitResolver[A](q).resolver(forwardExpr, Map.empty, debug)(parserTpe) + + // --- + + /* + * @tparam T the type of parsed data (single column or tuple-like `~`) + * @param parsing the parsing expression (e.g. 
`get("a") ~ get("b")`)
+   * @param parsedTpr the representation of type `T`
+   * @param matchPattern the match pattern to extract values inside `.map`
+   * @param columnSymss the column symbols bound in the `matchPattern` (list of lists, as properties can be passed as multiple parameter lists to the constructor)
+   */
+  case class GenerationState[T](
+      parsing: Expr[RowParser[T]],
+      parsedTpr: TypeRepr,
+      matchPattern: Tree,
+      columnSymss: List[List[Symbol]]
+    )
+
+  val pkg = Symbol.requiredPackage("anorm")
+  val TildeSelect = (for {
+    ts <- pkg.declaredType("~").headOption.map(_.companionModule)
+    un <- ts.declaredMethod("unapply").headOption
+  } yield Ref(pkg).select(ts).select(un)) match {
+    case Some(select) =>
+      select
+
+    case _ =>
+      abort("Fails to resolve ~ symbol")
+  }
+
+  @annotation.tailrec
+  def prepare[T](
+      propss: List[List[Symbol]],
+      pi: Int,
+      combined: Option[GenerationState[T]],
+      hasSelfRef: Boolean,
+      hasGenericProperty: Boolean
+    )(using Type[T]): Option[Expr[Row => SqlResult[A]]] =
+    propss.headOption match {
+      case Some(sym :: localTail) => {
+        val tn = sym.name
+
+        val tt: TypeRepr = sym.tree match {
+          case vd: ValDef => {
+            val vtpe = vd.tpt.tpe
+
+            boundTypes.getOrElse(vtpe.typeSymbol, vtpe)
+          }
+
+          case _ =>
+            abort(s"Value definition expected for ${repr.show} constructor parameter: $sym")
+        }
+
+        val isGenericProp = tt match {
+          case AppliedType(_, as) =>
+            as.nonEmpty
+
+          case _ =>
+            false
+        }
+
+        val colSym: Symbol =
+          Symbol.newBind(Symbol.spliceOwner, tn, Flags.Case, tt)
+
+        // Pattern to match a single column in `.map` pattern matching
+        val singlePat = Bind(colSym, Typed(Wildcard(), Inferred(tt)))
+
+        tt.asType match {
+          case '[t] =>
+            def initialState(expr: Expr[RowParser[t]]) =
+              GenerationState[t](
+                parsing = expr,
+                parsedTpr = tt,
+                matchPattern = singlePat,
+                columnSymss = List(colSym) :: Nil
+              )
+
+            def combineState(
+                parent: GenerationState[T],
+                expr: Expr[RowParser[T ~ t]]
+              ): GenerationState[T ~ t] = {
+              val pat = Unapply(
+                fun = TypeApply(TildeSelect, List(Inferred(parent.parsedTpr), Inferred(tt))),
+                implicits = Nil,
+                patterns = List(parent.matchPattern, singlePat)
+              )
+
+              GenerationState[T ~ t](
+                parsing = expr,
+                parsedTpr = TypeRepr.of[T ~ t],
+                matchPattern = pat,
+                columnSymss = parent.columnSymss match {
+                  case headList :: tails =>
+                    (colSym :: headList) :: tails
+
+                  case _ => {
+                    // Should have been handled by initialState, something went wrong
+                    abort(s"Fails to handle initial state of RowParser generation: ${repr.show}")
+                  }
+                }
+              )
+            }
+
+            Expr.summon[Column[t]] match {
+              case None =>
+                // ... try to resolve `RowParser[tt]`
+                resolv(tt) match {
+                  case None =>
+                    abort(s"cannot find Column nor RowParser for ${tn}:${tt.show} in ${ctor.fullName}")
+
+                  case Some((pr, s)) => {
+                    val hasSelf = if s then s else hasSelfRef
+
+                    // Use an existing `RowParser[t]` as part
+                    val tpr: Expr[RowParser[t]] = pr.asExprOf[RowParser[t]]
+
+                    combined match {
+                      case Some(parent @ GenerationState(prev, _, _, _)) =>
+                        prepare[T ~ t](
+                          propss = localTail :: propss.tail,
+                          pi = pi + 1,
+                          combined = Some {
+                            combineState(parent, '{ $prev ~ $tpr })
+                          },
+                          hasSelfRef = hasSelf,
+                          hasGenericProperty = isGenericProp || hasGenericProperty
+                        )
+
+                      case _ =>
+                        prepare[t](
+                          propss = localTail :: propss.tail,
+                          pi = pi + 1,
+                          combined = Some(initialState(tpr)),
+                          hasSelfRef = hasSelf,
+                          hasGenericProperty = isGenericProp || hasGenericProperty
+                        )
+                    }
+                  }
+                }
+
+              case Some(col) => {
+                // Generate a `get` for the `Column[T]`
+                val get: Expr[RowParser[t]] = genGet[t](col, tn, pi)
+
+                combined match {
+                  case Some(parent @ GenerationState(prev, _, _, _)) =>
+                    prepare[T ~ t](
+                      propss = localTail :: propss.tail,
+                      pi = pi + 1,
+                      combined = Some {
+                        combineState(parent, '{ $prev ~ $get })
+                      },
+                      hasSelfRef = hasSelfRef,
+                      hasGenericProperty = isGenericProp || hasGenericProperty
+                    )
+
+                  case None =>
+                    prepare[t](
+                      propss = localTail :: propss.tail,
+                      pi = pi + 1,
+                      combined = Some(initialState(get)),
+                      hasSelfRef = hasSelfRef,
+                      hasGenericProperty = isGenericProp || hasGenericProperty
+                    )
+                }
+              }
+            }
+        }
+      }
+
+      case Some(Nil) if propss.tail.nonEmpty => {
+        // End of one parameter list for the properties, but there is more
+
+        prepare[T](
+          propss = propss.tail, // other parameter lists
+          pi = pi,
+          combined = combined match {
+            case Some(state) =>
+              Some(state.copy(columnSymss = Nil :: state.columnSymss))
+
+            case None =>
+              abort(s"Missing generation state: ${repr.show}")
+          },
+          hasSelfRef = hasSelfRef,
+          hasGenericProperty = hasGenericProperty
+        )
+      }
+
+      case Some(Nil) | None =>
+        combined match {
+          case None =>
+            None
+
+          case Some(GenerationState(parsing, _, matchPattern, revColss)) => {
+            val targs = boundTypes.values.toList
+            val colArgss = revColss.reverse.map {
+              _.reverse.map(Ref(_: Symbol))
+            }
+            val newTerm = New(Inferred(erased)).select(ctor)
+
+            val ctorCall: Expr[A] = {
+              if (targs.nonEmpty) {
+                newTerm.appliedToTypes(targs).appliedToArgss(colArgss)
+              } else {
+                newTerm.appliedToArgss(colArgss)
+              }
+            }.asExprOf[A]
+
+            val ctorCase = CaseDef(
+              pattern = matchPattern,
+              guard = None,
+              rhs = '{ anorm.Success[A](${ ctorCall }) }.asTerm
+            )
+
+            inline def cases[U: Type](inline parsed: Expr[U]) = {
+              // Workaround: in case of a generic property
+              // (whose type has type arguments), a false warning
+              // is raised about exhaustivity.
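+              // Illustrative (hypothetical) case: for a generic property
+              // typed `Wrap[T]` in a `case class Foo[T](w: Wrap[T])`,
+              // the compiler cannot prove the match below exhaustive,
+              // hence the wildcard fallback case.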
+ + if (hasGenericProperty) { + List( + ctorCase, + CaseDef( + Wildcard(), + guard = None, + rhs = '{ + anorm.Error( + anorm.SqlMappingError( + "Unexpected parsed value: " + ${ parsed } + ) + ) + }.asTerm + ) + ) + } else { + List(ctorCase) + } + } + + inline def flatMapParsed[U: Type](inline parsed: Expr[U]): Expr[SqlResult[A]] = + Match(parsed.asTerm, cases(parsed)).asExprOf[SqlResult[A]] + + Some('{ + lazy val parsingRow = ${ parsing } + + { (row: Row) => + parsingRow(row).flatMap { parsed => + ${ flatMapParsed('parsed) } + } + } + }) + } + } + } + + val generated: Expr[Row => SqlResult[A]] = + prepare[Nothing](properties, 0, None, false, false) match { + case Some(fn) => + fn + + case _ => + abort(s"Fails to prepare the parser function: ${repr.show}") + } + + debug(s"row parser generated for ${repr.show}: ${generated.show}") + + generated + } +} diff --git a/core/src/main/scala-3/anorm/macros/SealedRowParserImpl.scala b/core/src/main/scala-3/anorm/macros/SealedRowParserImpl.scala new file mode 100644 index 00000000..5b0e6784 --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/SealedRowParserImpl.scala @@ -0,0 +1,132 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.Macro.{ debugEnabled, Discriminate, DiscriminatorNaming } +import anorm.{ Error, RowParser, SqlMappingError, SqlParser } + +private[anorm] object SealedRowParserImpl { + def apply[A]( + naming: Expr[DiscriminatorNaming], + discriminate: Expr[Discriminate] + )(using q: Quotes, tpe: Type[A]): Expr[RowParser[A]] = { + import q.reflect.* + + val repr = TypeRepr.of[A](using tpe) + + val subclasses = Inspect.knownSubclasses(q)(repr) match { + case Some(classes) => + classes + + case None => + report.errorAndAbort(s"cannot find any subclass: ${repr.show}") + } + + // --- + + type CaseType[U <: A] = U + + val subParsers = List.newBuilder[(TypeRepr, Expr[RowParser[_]])] + + val missing: List[TypeRepr] = subclasses.flatMap { subcls => + subcls.asType match { + case '[CaseType[sub]] => + Expr.summon[RowParser[sub]] match { + case Some(subParser) => { + subParsers += subcls -> subParser + + List.empty[TypeRepr] + } + + case _ => + List(subcls) + } + + case _ => + List(subcls) + } + } + + if (missing.nonEmpty) { + def details = missing + .map { subcls => + s"- cannot find anorm.RowParser[${subcls.show}] in the implicit scope" + } + .mkString(",\r\n") + + report.errorAndAbort(s"fails to generate sealed parser: ${repr.show};\r\n$details") + } + + // --- + + val cases: List[(String, CaseDef)] = subParsers.result().map { + case (subcls, subParser) => + val tpeSym = subcls.typeSymbol + val tpeName = { + if (tpeSym.flags.is(Flags.Module)) tpeSym.fullName.stripSuffix(f"$$") + else tpeSym.fullName + } + + val key = '{ $discriminate(${ Expr(tpeName) }) } + + val bind = + Symbol.newBind( + Symbol.spliceOwner, + tpeSym.name, + Flags.Case, + TypeRepr.of[String] + ) + + val ref = Ref(bind).asExprOf[String] + + tpeSym.fullName -> CaseDef( + Bind(bind, Wildcard()), + guard = Some('{ $ref == $key }.asTerm), + rhs = subParser.asTerm + ) + } + + def fallbackCase: CaseDef = { + val fallbackBind = + Symbol.newBind( + Symbol.spliceOwner, + "d", + Flags.Case, + TypeRepr.of[String] + ) + + val fallbackVal = Ref(fallbackBind).asExprOf[String] + + CaseDef( + Bind(fallbackBind, Wildcard()), + guard = None, + rhs = '{ + val msg = + "unexpected row type '%s'; expected: %s".format($fallbackVal, ${ Expr(cases.map(_._1)) }.mkString(", ")) + + RowParser.failed[A](Error(SqlMappingError(msg))) + }.asTerm + ) + } + + inline def 
body(inline discriminatorVal: Expr[String]): Expr[RowParser[A]] =
+      Match(
+        discriminatorVal.asTerm,
+        cases.map(_._2) :+ fallbackCase
+      ).asExprOf[RowParser[A]]
+
+    val parser: Expr[RowParser[A]] = '{
+      val discriminatorCol = $naming(${ Expr(repr.typeSymbol.fullName) })
+
+      SqlParser.str(discriminatorCol).flatMap { (discriminatorVal: String) =>
+        ${ body('discriminatorVal) }
+      }
+    }
+
+    if (debugEnabled) {
+      report.info(s"row parser generated for ${repr.show}: ${parser.show}")
+    }
+
+    parser
+  }
+}
diff --git a/core/src/main/scala-3/anorm/macros/ToParameterListImpl.scala b/core/src/main/scala-3/anorm/macros/ToParameterListImpl.scala
new file mode 100644
index 00000000..a1be8b06
--- /dev/null
+++ b/core/src/main/scala-3/anorm/macros/ToParameterListImpl.scala
@@ -0,0 +1,310 @@
+package anorm.macros
+
+import scala.quoted.{ Expr, Quotes, Type }
+
+import anorm.{ Compat, Macro, ToParameterList, ToParameterValue, ToSql, ToStatement, NamedParameter }
+import Macro.{ debugEnabled, ParameterProjection }
+
+private[anorm] object ToParameterListImpl {
+  def sealedTrait[A](using q: Quotes, tpe: Type[A]): Expr[ToParameterList[A]] = {
+
+    import q.reflect.*
+
+    val repr = TypeRepr.of[A](using tpe)
+
+    val subclasses = Inspect.knownSubclasses(q)(repr) match {
+      case Some(classes) =>
+        classes
+
+      case None =>
+        report.errorAndAbort(s"cannot find any subclass: ${repr.show}")
+    }
+
+    // ---
+
+    type CaseType[U <: A] = U
+
+    val cases = subclasses.zipWithIndex.map {
+      case (subcls, i) =>
+        subcls.asType match {
+          case '[CaseType[t]] =>
+            Expr.summon[ToParameterList[t]] match {
+              case Some(toParams) => {
+                val bind =
+                  Symbol.newBind(
+                    Symbol.spliceOwner,
+                    s"sub${i}",
+                    Flags.Case,
+                    subcls
+                  )
+
+                val matchedRef = Ref(bind).asExprOf[t]
+
+                CaseDef(
+                  Bind(bind, Typed(Wildcard(), Inferred(subcls))),
+                  guard = None,
+                  rhs = '{ $toParams($matchedRef) }.asTerm
+                )
+              }
+
+              case None =>
+                report.errorAndAbort(s"Missing ToParameterList[${subcls.show}]")
+            }
+
+          case _ =>
+            report.errorAndAbort(s"Invalid subclass ${subcls.show} for ${repr.show}")
+        }
+    }
+
+    inline def body(inline a: Expr[A]): Expr[List[NamedParameter]] =
+      Match(a.asTerm, cases).asExprOf[List[NamedParameter]]
+
+    val block = '{
+      ToParameterList[A] { (a: A) => ${ body('a) } }
+    }
+
+    if (debugEnabled) {
+      report.info(s"ToParameterList generated for ${repr.show}: ${block.show}")
+    }
+
+    block
+  }
+
+  def caseClass[A](
+      forwardExpr: Expr[ToParameterList[A]],
+      projection: Expr[Seq[ParameterProjection]],
+      separator: Expr[String]
+    )(using q: Quotes, tpe: Type[A], tsTpe: Type[ToParameterList]): Expr[A => List[NamedParameter]] = {
+    import q.reflect.*
+
+    val (aTpr, aTArgs) = TypeRepr.of[A](using tpe) match {
+      case tpr @ AppliedType(_, args) =>
+        tpr -> args.collect {
+          case repr: TypeRepr =>
+            repr
+        }
+
+      case tpr =>
+        tpr -> List.empty[TypeRepr]
+    }
+
+    val tpeSym = aTpr.typeSymbol
+
+    @inline def abort(msg: String) = report.errorAndAbort(msg)
+
+    if (!tpeSym.isClassDef || !tpeSym.flags.is(Flags.Case)) {
+      abort(s"Case class expected: ${aTpr.show}")
+    }
+
+    val ctor = tpeSym.primaryConstructor
+
+    val (boundTypes, properties) = ctor.paramSymss match {
+      case targs :: params :: tail if targs.forall(_.isType) => {
+        if (tail.nonEmpty) {
+          report.info(
+            s"${aTpr.show} constructor has multiple parameter lists. As with unapply, only the first one will be considered"
+          )
+        }
+
+        val boundTps = targs.zip(aTArgs).toMap
+
+        boundTps -> params
+      }
+
+      case params :: Nil if !params.exists(_.isType) =>
+        Map.empty[Symbol, TypeRepr] -> params
+
+      case _ =>
+        report.errorAndAbort(s"${aTpr.show} constructor has no parameters")
+    }
+
+    if (properties.isEmpty) {
+      abort("parsed data cannot be passed as constructor parameters")
+    }
+
+    // ---
+
+    val debug = {
+      if (debugEnabled) report.info(_: String)
+      else (_: String) => {}
+    }
+
+    val resolv = ImplicitResolver[A](q).resolver(forwardExpr, Map.empty, debug)(tsTpe)
+
+    val compiledProjection: Seq[ParameterProjection] = {
+      import _root_.anorm.Macro.parameterProjectionFromExpr
+
+      projection.valueOrAbort
+    }
+    lazy val projectionMap = compiledProjection.collect {
+      case ParameterProjection(propertyName, Some(parameterName)) =>
+        propertyName -> parameterName
+    }.toMap
+
+    // Among the properties, according to the specified projection
+    val selectedProperties: Seq[String] = {
+      val propertyNames = properties.map(_.name)
+
+      if (compiledProjection.isEmpty) {
+        propertyNames
+      } else {
+        compiledProjection.collect {
+          case proj if propertyNames.contains(proj.propertyName) =>
+            proj.propertyName
+        }
+      }
+    }
+
+    if (selectedProperties.isEmpty) {
+      report.errorAndAbort(
+        s"No property selected to be converted as SQL parameter for ${aTpr.show}: ${properties.mkString(", ")}"
+      )
+    }
+
+    // ---
+
+    type Builder = scala.collection.mutable.Builder[NamedParameter, List[NamedParameter]]
+
+    type Append[T] = Function2[Expr[T], Expr[Builder], Expr[Builder]]
+
+    val namedAppends = Map.newBuilder[String, (TypeRepr, Append[Any])]
+
+    properties.foreach { sym =>
+      if (!selectedProperties.contains(sym.name)) {
+        debug(s"${sym.name} is filtered: ${selectedProperties.mkString(", ")}")
+      } else {
+        val pname = Expr(projectionMap.getOrElse(sym.name, sym.name))
+
+        val tt: TypeRepr = sym.tree match {
+          case vd: ValDef => {
+            val vtpe = vd.tpt.tpe
+
+            boundTypes.getOrElse(vtpe.typeSymbol, vtpe)
+          }
+
+          case _ =>
+            report.errorAndAbort(s"Value definition expected for ${aTpr.show} constructor parameter: $sym")
+        }
+
+        tt.asType match {
+          case pt @ '[t] => {
+            val toSql = Expr.summon[ToSql[t]] match {
+              case Some(resolved) =>
+                resolved
+
+              case _ =>
+                '{ null: ToSql[t] }
+            }
+
+            resolv(tt) match {
+              case None if tt <:< aTpr => {
+                val append: Function2[Expr[A], Expr[Builder], Expr[Builder]] = { (v, buf) =>
+                  '{
+                    val prefix: String = $pname + $separator
+
+                    $buf ++= ${ forwardExpr }($v).map { p =>
+                      p.copy(name = prefix + p.name)
+                    }
+                  }
+                }
+
+                namedAppends += sym.name -> (tt -> append.asInstanceOf[Append[Any]])
+              }
+
+              case None =>
+                Expr.summon[ToStatement[t]] match {
+                  case None =>
+                    abort(s"cannot find either anorm.ToParameterList or anorm.ToStatement for ${sym.name}:${tt.show}")
+
+                  case Some(toStmt) => { // use ToSql+ToStatement
+                    val append: Function2[Expr[t], Expr[Builder], Expr[Builder]] = { (v, buf) =>
+                      '{
+                        $buf += NamedParameter
+                          .namedWithString($pname -> $v)(ToParameterValue($toSql, $toStmt))
+                      }
+                    }
+
+                    namedAppends += sym.name -> (tt -> append.asInstanceOf[Append[Any]])
+                  }
+                }
+
+              case Some((toParams, _)) => { // use ToParameterList
+                val append: Function2[Expr[t], Expr[Builder], Expr[Builder]] = { (v, buf) =>
+                  '{
+                    val prefix: String = $pname + $separator
+
+                    $buf ++= ${ toParams.asExprOf[ToParameterList[t]] }($v).map { p =>
+                      p.copy(name = prefix + p.name)
+                    }
+                  }
+                }
+
+                namedAppends += sym.name -> (tt -> append.asInstanceOf[Append[Any]])
+              }
+            }
+          }
+        }
+      }
+    }
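+
+    // Illustrative sketch (for a hypothetical `case class Foo(name: String, age: Int)`
+    // and an empty projection): the appended parameters amount to
+    //   (foo: Foo) => List(
+    //     NamedParameter("name", foo.name),
+    //     NamedParameter("age", foo.age)
+    //   )
+    // with the values implicitly converted through their ToStatement instances.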
+ + val appendParameters: Map[String, (TypeRepr, Append[Any])] = + namedAppends.result() + + inline def appendField( + inline a: Term, + buf: Expr[Builder], + fieldName: String, + paramName: String + ): Expr[Builder] = appendParameters.get(fieldName) match { + case Some((pTpr, appendFn)) => { + val fieldValue = a.select(a.symbol.fieldMember(fieldName)) + + appendFn(fieldValue.asExpr, buf) + } + + case _ => + report.errorAndAbort(s"Missing append function for ${fieldName}: ${aTpr.show}") + } + + inline def withBuilder(inline a: Term, buf: Expr[Builder]): Expr[Unit] = { + val pj: List[(String, String)] = { + if (compiledProjection.nonEmpty) { + compiledProjection.toList.flatMap { + case ParameterProjection(nme, param) => + param.map(nme -> _) + } + } else { + selectedProperties.map { n => n -> n }.toList + } + } + + Expr.block( + pj.map { + case (fieldName, paramName) => + appendField(a, buf, fieldName, paramName) + }, + '{ () } + ) + } + + inline def appendBlock(inline a: Expr[A]): Expr[List[NamedParameter]] = { + val term = asTerm(a) + + '{ + val buf: Builder = List.newBuilder[NamedParameter] + + ${ withBuilder(term, 'buf) } + + buf.result() + } + } + + val block = '{ (a: A) => ${ appendBlock('a) } } + + if (debugEnabled) { + report.info(s"ToParameterList generated for ${aTpr.show}: ${block.show}") + } + + block + } +} diff --git a/core/src/main/scala-3/anorm/macros/ValueColumnImpl.scala b/core/src/main/scala-3/anorm/macros/ValueColumnImpl.scala new file mode 100644 index 00000000..4f66454d --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/ValueColumnImpl.scala @@ -0,0 +1,61 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.Column +import anorm.Macro.debugEnabled + +private[anorm] trait ValueColumn { + protected def valueColumnImpl[A <: AnyVal](using q: Quotes, tpe: Type[A]): Expr[Column[A]] = { + import q.reflect.* + + val aTpr = TypeRepr.of[A](using tpe) + val ctor = aTpr.typeSymbol.primaryConstructor + + ctor.paramSymss match { + case List(v) :: Nil => { + v.tree match { + case vd: ValDef => { + val tpr = vd.tpt.tpe + + tpr.asType match { + case vtpe @ '[t] => + Expr.summon[Column[t]] match { + case Some(col) => { + def mapf(in: Expr[t]): Expr[A] = + New(Inferred(aTpr)) + .select(ctor) + .appliedTo(in.asTerm) + .asExprOf[A] + + val generated = '{ ${ col }.map(in => ${ mapf('in) }) } + + if (debugEnabled) { + report.info(s"column generated for ${aTpr.show}: ${generated.show}") + } + + generated + } + + case _ => + report.errorAndAbort( + s"Instance not found: ${classOf[Column[_]].getName}[${tpr.typeSymbol.fullName}]" + ) + } + } + } + + case _ => + report.errorAndAbort( + s"Constructor parameter expected, found: ${v}" + ) + } + } + + case _ => + report.errorAndAbort( + s"Cannot resolve value Column for '${aTpr.typeSymbol.name}'" + ) + } + } +} diff --git a/core/src/main/scala-3/anorm/macros/ValueToStatement.scala b/core/src/main/scala-3/anorm/macros/ValueToStatement.scala new file mode 100644 index 00000000..8b54bf80 --- /dev/null +++ b/core/src/main/scala-3/anorm/macros/ValueToStatement.scala @@ -0,0 +1,70 @@ +package anorm.macros + +import scala.quoted.{ Expr, Quotes, Type } + +import anorm.Macro.debugEnabled +import anorm.ToStatement + +private[anorm] trait ValueToStatement { + protected def valueToStatementImpl[A <: AnyVal](using q: Quotes, tpe: Type[A]): Expr[ToStatement[A]] = { + + import q.reflect.* + + val aTpr = TypeRepr.of[A](using tpe) + val ctor = aTpr.typeSymbol.primaryConstructor + + ctor.paramSymss match { + case List(v) :: Nil => 
{ + v.tree match { + case vd: ValDef => { + val tpr = vd.tpt.tpe + + tpr.asType match { + case vtpe @ '[t] => + Expr.summon[ToStatement[t]] match { + case Some(ts) => { + def inner(a: Expr[A]) = { + val term = asTerm(a) + + term + .select(term.symbol.fieldMember(v.name)) + .asExprOf[t](using vtpe) + } + + val generated = '{ + new ToStatement[A] { + def set(s: java.sql.PreparedStatement, i: Int, a: A): Unit = + ${ ts }.set(s, i, ${ inner('a) }) + } + } + + if (debugEnabled) { + report.info(s"ToStatement for $tpe: ${generated.show}") + } + + generated + } + + case _ => + report.errorAndAbort( + s"Instance not found: ${classOf[ToStatement[_]].getName}[${tpr.typeSymbol.fullName}]" + ) + } + + } + } + + case _ => + report.errorAndAbort( + s"Constructor parameter expected, found: ${v}" + ) + } + } + + case _ => + report.errorAndAbort( + s"Cannot resolve value ToStatement for '${aTpr.typeSymbol.name}'" + ) + } + } +} diff --git a/core/src/main/scala-3/silent.scala b/core/src/main/scala-3/silent.scala new file mode 100644 index 00000000..5c36c17f --- /dev/null +++ b/core/src/main/scala-3/silent.scala @@ -0,0 +1,3 @@ +package com.github.ghik.silencer + +class silent(s: String = "") extends scala.annotation.StaticAnnotation diff --git a/core/src/main/scala/anorm/Anorm.scala b/core/src/main/scala/anorm/Anorm.scala index 859c31a5..3c35242b 100644 --- a/core/src/main/scala/anorm/Anorm.scala +++ b/core/src/main/scala/anorm/Anorm.scala @@ -7,7 +7,9 @@ import java.sql.{ Connection, PreparedStatement, ResultSet } import scala.util.{ Failure, Try } -import resource.{ managed, ManagedResource } +import scala.reflect.ClassTag + +import resource.{ managed, ManagedResource, Resource } /** * Untyped value wrapper. @@ -15,7 +17,7 @@ import resource.{ managed, ManagedResource } * {{{ * import anorm._ * - * def foo(v: Any) = SQL("UPDATE t SET val = {o}").on('o -> anorm.Object(v)) + * def foo(v: Any) = SQL("UPDATE t SET val = {o}").on("o" -> anorm.Object(v)) * }}} */ case class Object(value: Any) @@ -53,6 +55,11 @@ object SeqParameter { } private[anorm] trait Sql extends WithResult { + private implicit val statementCls: ClassTag[PreparedStatement] = + statementClassTag + + private implicit val resultSetCls: ClassTag[ResultSet] = resultSetClassTag + private[anorm] def unsafeStatement(connection: Connection, getGeneratedKeys: Boolean = false): PreparedStatement private[anorm] def unsafeStatement( @@ -65,7 +72,8 @@ private[anorm] trait Sql extends WithResult { connection: Connection, getGeneratedKeys: Boolean = false ): ManagedResource[PreparedStatement] = { - implicit val res = StatementResource + implicit val res: Resource[PreparedStatement] = StatementResource + managed(unsafeStatement(connection, getGeneratedKeys)) } @@ -74,7 +82,8 @@ private[anorm] trait Sql extends WithResult { generatedColumn: String, generatedColumns: Seq[String] ): ManagedResource[PreparedStatement] = { - implicit val res = StatementResource + implicit val res: Resource[PreparedStatement] = StatementResource + managed(unsafeStatement(connection, generatedColumn, generatedColumns)) } @@ -83,7 +92,8 @@ private[anorm] trait Sql extends WithResult { */ protected def resultSet(connection: Connection): ManagedResource[ResultSet] = preparedStatement(connection).flatMap { stmt => - implicit val res = ResultSetResource + implicit val res: Resource[ResultSet] = ResultSetResource + managed(stmt.executeQuery()) } @@ -197,15 +207,20 @@ private[anorm] trait Sql extends WithResult { prep: Connection => ManagedResource[PreparedStatement], generatedKeysParser: 
ResultSetParser[A], as: ColumnAliaser - )(implicit connection: Connection): Try[A] = Sql.asTry( - generatedKeysParser, - prep(connection).flatMap { stmt => - stmt.executeUpdate() - managed(stmt.getGeneratedKeys) - }, - resultSetOnFirstRow, - as - ) + )(implicit connection: Connection): Try[A] = { + @com.github.ghik.silencer.silent + implicit def cls: ClassTag[ResultSet] = resultSetClassTag + + Sql.asTry( + generatedKeysParser, + prep(connection).flatMap { stmt => + stmt.executeUpdate() + managed(stmt.getGeneratedKeys) + }, + resultSetOnFirstRow, + as + ) + } /** * Executes this SQL query, and returns its result. @@ -262,7 +277,6 @@ object Sql { // TODO: Rename to SQL private def toSql(ts: List[StatementToken], buf: StringBuilder): StringBuilder = ts.foldLeft(buf) { case (sql, StringToken(t)) => sql ++= t case (sql, PercentToken) => sql += '%' - case (sql, _) => sql } @SuppressWarnings(Array("IncorrectlyNamedExceptions")) diff --git a/core/src/main/scala/anorm/BatchSql.scala b/core/src/main/scala/anorm/BatchSql.scala index 4f66ee61..ff65b1b3 100644 --- a/core/src/main/scala/anorm/BatchSql.scala +++ b/core/src/main/scala/anorm/BatchSql.scala @@ -2,7 +2,7 @@ package anorm import java.sql.{ Connection, PreparedStatement } -import resource.managed +import resource.{ managed, Resource } private[anorm] object BatchSqlErrors { val HeterogeneousParameterMaps = "if each map hasn't same parameter names" @@ -85,7 +85,11 @@ sealed trait BatchSql { fill(connection, null, getGeneratedKeys, params) def execute()(implicit connection: Connection): Array[Int] = { - implicit val res = StatementResource + implicit val res: Resource[PreparedStatement] = StatementResource + + implicit val cls: scala.reflect.ClassTag[PreparedStatement] = + statementClassTag + managed(getFilledStatement(connection)).acquireAndGet(_.executeBatch()) } @@ -252,4 +256,5 @@ object BatchSql { private[anorm] case class Copy(sql: SqlQuery, names: Set[String], params: Seq[Map[String, ParameterValue]]) extends BatchSql + } diff --git a/core/src/main/scala/anorm/Column.scala b/core/src/main/scala/anorm/Column.scala index a9a2097e..76f472de 100644 --- a/core/src/main/scala/anorm/Column.scala +++ b/core/src/main/scala/anorm/Column.scala @@ -16,6 +16,8 @@ import java.sql.Timestamp import scala.util.{ Failure, Success => TrySuccess, Try } import scala.util.control.NonFatal +import scala.reflect.ClassTag + import resource.managed /** @@ -610,14 +612,19 @@ object Column extends JodaColumn with JavaTimeColumn { unsafe } - @inline private def streamBytes(in: InputStream): Either[SqlRequestError, Array[Byte]] = managed(in) - .acquireFor(streamToBytes(_)) - .fold( - { errs => - Left(TypeDoesNotMatch(errs.headOption.fold("Fails to read binary stream")(_.getMessage))) - }, - Right(_) - ) + @inline private def streamBytes(in: InputStream): Either[SqlRequestError, Array[Byte]] = { + import resource.extractedEitherToEither + implicit val cls: ClassTag[InputStream] = inputStreamClassTag + + managed(in) + .acquireFor(streamToBytes(_)) + .fold( + { errs => + Left(TypeDoesNotMatch(errs.headOption.fold("Fails to read binary stream")(_.getMessage))) + }, + Right(_) + ) + } @annotation.tailrec private def streamToBytes( @@ -630,6 +637,9 @@ object Column extends JodaColumn with JavaTimeColumn { if (count == -1) bytes else streamToBytes(in, bytes ++ buffer.take(count), buffer) } + + private[anorm] lazy val inputStreamClassTag = + implicitly[ClassTag[InputStream]] } sealed trait JodaColumn { @@ -643,7 +653,7 @@ sealed trait JodaColumn { * * {{{ * import 
org.joda.time.LocalDate
- * import anorm.{ SQL, SqlParser }, SqlParser.scalar
+ * import anorm._, SqlParser.scalar
  *
  * def ld(implicit con: java.sql.Connection): LocalDate =
  *   SQL("SELECT last_mod FROM tbl").as(scalar[LocalDate].single)
@@ -728,7 +738,7 @@ sealed trait JodaColumn {
   * Parses column as joda Instant
   *
   * {{{
-  * import anorm.{ SQL, SqlParser }, SqlParser.scalar
+  * import anorm._, SqlParser.scalar
   * import org.joda.time.Instant
   *
   * def d(implicit con: java.sql.Connection): Instant =
diff --git a/core/src/main/scala/anorm/MacroOptions.scala b/core/src/main/scala/anorm/MacroOptions.scala
new file mode 100644
index 00000000..2353b335
--- /dev/null
+++ b/core/src/main/scala/anorm/MacroOptions.scala
@@ -0,0 +1,104 @@
+package anorm
+
+private[anorm] trait MacroOptions {
+
+  /**
+   * Naming strategy, to map each class property to the corresponding column.
+   */
+  trait ColumnNaming extends (String => String) {
+
+    /**
+     * Returns the column name for the class property.
+     *
+     * @param property the name of the case class property
+     */
+    def apply(property: String): String
+  }
+
+  /** Naming companion */
+  object ColumnNaming {
+
+    /** Keep the original property name. */
+    object Identity extends ColumnNaming {
+      def apply(property: String) = property
+    }
+
+    /**
+     * For each class property, use the snake case equivalent
+     * to name its column (e.g. fooBar -> foo_bar).
+     */
+    object SnakeCase extends ColumnNaming {
+      private val re = "[A-Z]+".r
+
+      def apply(property: String): String =
+        re.replaceAllIn(property, { m => s"_${m.matched.toLowerCase}" })
+    }
+
+    /** Naming using a custom transformation function. */
+    def apply(transformation: String => String): ColumnNaming =
+      new ColumnNaming {
+        def apply(property: String): String = transformation(property)
+      }
+  }
+
+  trait Discriminate extends (String => String) {
+
+    /**
+     * Returns the value representing the specified type,
+     * to be used as a discriminator within a sealed family.
+     *
+     * @param tname the name of the type (class or object) to be discriminated
+     */
+    def apply(tname: String): String
+  }
+
+  object Discriminate {
+    sealed class Function(f: String => String) extends Discriminate {
+      def apply(tname: String) = f(tname)
+    }
+
+    /** Uses the type name as-is as value for the discriminator */
+    object Identity extends Function(identity[String])
+
+    /** Returns a `Discriminate` function from any `String => String`. */
+    def apply(discriminate: String => String): Discriminate =
+      new Function(discriminate)
+  }
+
+  trait DiscriminatorNaming extends (String => String) {
+
+    /**
+     * Returns the name for the discriminator column.
+     * @param familyType the name of the family type (sealed trait)
+     */
+    def apply(familyType: String): String
+  }
+
+  object DiscriminatorNaming {
+    sealed class Function(f: String => String) extends DiscriminatorNaming {
+      def apply(familyType: String) = f(familyType)
+    }
+
+    /** Always use "classname" as name for the discriminator column. */
+    object Default extends Function(_ => "classname")
+
+    /** Returns a naming function from any `String => String`. */
+    def apply(naming: String => String): DiscriminatorNaming =
+      new Function(naming)
+  }
+
+  /**
+   * @param propertyName the name of the class property
+   * @param parameterName the name for the parameter,
+   * if different from the property one, otherwise `None`
+   */
+  case class ParameterProjection(propertyName: String, parameterName: Option[String] = None)
+
+  object ParameterProjection {
+    def apply(
+        propertyName: String,
+        parameterName: String
+      ): ParameterProjection =
+      ParameterProjection(propertyName, Option(parameterName))
+  }
+}
diff --git a/core/src/main/scala/anorm/MetaData.scala b/core/src/main/scala/anorm/MetaData.scala
index 68571213..eca4fcef 100644
--- a/core/src/main/scala/anorm/MetaData.scala
+++ b/core/src/main/scala/anorm/MetaData.scala
@@ -150,7 +150,7 @@ private[anorm] object MetaData {
 
       }
 
-      val cn = ColumnName(tableName + "." + meta.getColumnName(i), alias = Option(meta.getColumnLabel(i)))
+      val cn = ColumnName(qualified = tableName + "." + meta.getColumnName(i), alias = Option(meta.getColumnLabel(i)))
 
       val colName = as(i -> cn).fold(cn)(a => cn.copy(alias = Some(a)))
 
diff --git a/core/src/main/scala/anorm/NamedParameter.scala b/core/src/main/scala/anorm/NamedParameter.scala
index defb8906..9995dc75 100644
--- a/core/src/main/scala/anorm/NamedParameter.scala
+++ b/core/src/main/scala/anorm/NamedParameter.scala
@@ -42,25 +42,15 @@ object NamedParameter {
 
   /**
    * $namedWithSymbol
-   *
-   * {{{
-   * import anorm.{ NamedParameter, ParameterValue }
-   *
-   * def foo(pv: ParameterValue): NamedParameter = 'name -> pv
-   * }}}
    */
+  @deprecated(message = "Symbol is deprecated", since = "2.7.0")
   implicit def namedWithSymbol(t: (Symbol, ParameterValue)): NamedParameter =
     NamedParameter(t._1.name, t._2)
 
   /**
    * $namedWithSymbol
-   *
-   * {{{
-   * import anorm.NamedParameter
-   *
-   * val p: NamedParameter = 'name -> 1L
-   * }}}
    */
+  @deprecated(message = "Symbol is deprecated", since = "2.7.0")
   implicit def namedWithSymbol[V](t: (Symbol, V))(implicit c: ToParameterValue[V]): NamedParameter =
     NamedParameter(t._1.name, c(t._2))
 
diff --git a/core/src/main/scala/anorm/ParameterValue.scala b/core/src/main/scala/anorm/ParameterValue.scala
index 086256b9..b5b38e3d 100644
--- a/core/src/main/scala/anorm/ParameterValue.scala
+++ b/core/src/main/scala/anorm/ParameterValue.scala
@@ -66,7 +66,7 @@ object ParameterValue {
 
   @throws[IllegalArgumentException]("if value `v` is null whereas `toStmt` is marked with [[anorm.NotNullGuard]]")
   @SuppressWarnings(Array("NullParameter"))
-  @inline def apply[A](v: A, s: ToSql[A], toStmt: ToStatement[A]) =
+  @inline def apply[A](v: A, s: ToSql[A], toStmt: ToStatement[A]): ParameterValue =
     (v, toStmt) match {
       case (null, _: NotNullGuard) => throw new IllegalArgumentException()
       case _ => new DefaultParameterValue(v, s, toStmt)
diff --git a/core/src/main/scala/anorm/Row.scala b/core/src/main/scala/anorm/Row.scala
index 52eaa257..da977da4 100644
--- a/core/src/main/scala/anorm/Row.scala
+++ b/core/src/main/scala/anorm/Row.scala
@@ -38,10 +38,11 @@ trait Row {
    * @see #as
    */
   lazy val asMap: Map[String, Any] =
-    Compat.toMap(Compat.lazyZip(data, metaData.ms)) { case (v, m) =>
-      val k = m.column.qualified
+    Compat.toMap(Compat.lazyZip(data, metaData.ms)) {
+      case (v, m) =>
+        val k = m.column.qualified
 
-      if (m.nullable) k -> Option(v) else k -> v
+        if (m.nullable) k -> Option(v) else k -> v
     }
 
   /**
@@ -76,8 +77,7 @@ trait Row {
    * @param c Column mapping
    *
    * {{{
-   * import anorm.SQL
-   * import anorm.Column.columnToString // mapping column to string
+   * import anorm._,
Column.columnToString // mapping column to string * * val res = SQL("SELECT * FROM Test").map { row => * // string columns 'code' and 'label' @@ -95,8 +95,7 @@ trait Row { * @param c Column mapping * * {{{ - * import anorm.SQL - * import anorm.Column.columnToString // mapping column to string + * import anorm._, Column.columnToString // mapping column to string * * val res = SQL("SELECT * FROM Test").map { row => * row(1) -> row(2) // string columns #1 and #2 @@ -114,8 +113,9 @@ trait Row { // Data per column name private lazy val columnsDictionary: Map[String, Any] = - Compat.toMap(Compat.lazyZip(metaData.ms, data)) { case (m, v) => - m.column.qualified.toUpperCase -> v + Compat.toMap(Compat.lazyZip(metaData.ms, data)) { + case (m, v) => + m.column.qualified.toUpperCase -> v } // Data per column alias diff --git a/core/src/main/scala/anorm/RowParser.scala b/core/src/main/scala/anorm/RowParser.scala new file mode 100644 index 00000000..1da70904 --- /dev/null +++ b/core/src/main/scala/anorm/RowParser.scala @@ -0,0 +1,180 @@ +package anorm + +trait RowParser[+A] extends (Row => SqlResult[A]) { parent => + + /** + * Returns a parser that will apply given function `f` + * to the result of this first parser. If the current parser is not + * successful, the new one will return encountered [[Error]]. + * + * @param f Function applied on the successful parser result + * + * {{{ + * import anorm.{ RowParser, SqlParser } + * + * val parser: RowParser[Int] = SqlParser.str("col").map(_.length) + * // Prepares a parser that first get 'col' string value, + * // and then returns the length of that + * }}} + */ + def map[B](f: A => B): RowParser[B] = RowParser(parent.andThen(_.map(f))) + + /** + * Returns parser which collects information + * from already parsed row data using `f`. + * + * @param otherwise Message returned as error if nothing can be collected using `f`. + * @param f Collecting function + */ + def collect[B](otherwise: String)(f: PartialFunction[A, B]): RowParser[B] = + RowParser(parent(_).flatMap(f.lift(_).fold[SqlResult[B]](Error(SqlMappingError(otherwise)))(Success(_)))) + + def flatMap[B](k: A => RowParser[B]): RowParser[B] = + RowParser(row => parent(row).flatMap(k(_)(row))) + + /** + * Combines this parser on the left of the parser `p` given as argument. + * + * @param p Parser on the right + * + * {{{ + * import anorm._, SqlParser.{ int, str } + * + * def populations(implicit con: java.sql.Connection): List[String ~ Int] = + * SQL("SELECT * FROM Country").as((str("name") ~ int("population")).*) + * }}} + */ + def ~[B](p: RowParser[B]): RowParser[A ~ B] = + RowParser(row => parent(row).flatMap(a => p(row).map(new ~(a, _)))) + + /** + * Combines this current parser with the one given as argument `p`, + * if and only if the current parser can first/on left side successfully + * parse a row, without keeping these values in parsed result. + * + * {{{ + * import anorm._, SqlParser.{ int, str } + * + * def string(implicit con: java.sql.Connection) = SQL("SELECT * FROM test"). + * as((int("id") ~> str("val")).single) + * // row has to have an int column 'id' and a string 'val' one, + * // keeping only 'val' in result + * }}} + */ + def ~>[B](p: RowParser[B]): RowParser[B] = + RowParser(row => parent(row).flatMap(_ => p(row))) + + /** + * Combines this current parser with the one given as argument `p`, + * if and only if the current parser can first successfully + * parse a row, without keeping the values of the parser `p`. 
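+   * (Dual of `~>`, which keeps only the value on the right.)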
+   *
+   * {{{
+   * import anorm._, SqlParser.{ int, str }
+   *
+   * def i(implicit con: java.sql.Connection) = SQL("SELECT * FROM test").
+   *   as((int("id") <~ str("val")).single)
+   * // row has to have an int column 'id' and a string 'val' one,
+   * // keeping only 'id' in result
+   * }}}
+   */
+  def <~[B](p: RowParser[B]): RowParser[A] = parent.~(p).map(_._1)
+
+  /**
+   * Returns a parser trying the parser `p` if this one fails on a row.
+   *
+   * @param p the alternative parser
+   */
+  def |[B >: A](p: RowParser[B]): RowParser[B] = RowParser { row =>
+    parent(row) match {
+      case Error(_) => p(row)
+      case a => a
+    }
+  }
+
+  /**
+   * Returns a row parser for an optional column,
+   * that will turn a missing or null column into `None`.
+   */
+  def ? : RowParser[Option[A]] = RowParser {
+    parent(_) match {
+      case Success(a) => Success(Some(a))
+      case Error(ColumnNotFound(_, _)) =>
+        Success(None)
+
+      case e @ Error(_) => e
+    }
+  }
+
+  /** Alias for [[flatMap]] */
+  def >>[B](f: A => RowParser[B]): RowParser[B] = flatMap(f)
+
+  /**
+   * Returns a possibly empty list parsed from the result.
+   *
+   * {{{
+   * import anorm._, SqlParser.scalar
+   *
+   * val price = 125
+   *
+   * def foo(implicit con: java.sql.Connection) =
+   *   SQL"SELECT name FROM item WHERE price < \\$price".as(scalar[String].*)
+   * }}}
+   */
+  def * : ResultSetParser[List[A]] = ResultSetParser.list(parent)
+
+  /**
+   * Returns a non-empty list parsed from the result,
+   * or raises an error if there is no result.
+   *
+   * {{{
+   * import anorm._, SqlParser.str
+   *
+   * def foo(implicit con: java.sql.Connection) = {
+   *   val parser = str("title") ~ str("descr")
+   *   SQL("SELECT title, descr FROM pages").as(parser.+) // at least 1 page
+   * }
+   * }}}
+   */
+  def + : ResultSetParser[List[A]] = ResultSetParser.nonEmptyList(parent)
+
+  /**
+   * Returns a result set parser expecting exactly one row to parse.
+   *
+   * {{{
+   * import anorm._, SqlParser.scalar
+   *
+   * def b(implicit con: java.sql.Connection): Boolean =
+   *   SQL("SELECT flag FROM Test WHERE id = :id").
+   *     on("id" -> 1).as(scalar[Boolean].single)
+   * }}}
+   *
+   * @see #singleOpt
+   */
+  def single: ResultSetParser[A] = ResultSetParser.single(parent)
+
+  /**
+   * Returns a result set parser for none or one parsed row.
+   *
+   * {{{
+   * import anorm._, SqlParser.scalar
+   *
+   * def name(implicit con: java.sql.Connection): Option[String] =
+   *   SQL("SELECT name FROM Country WHERE lang = :lang")
+   *     .on("lang" -> "notFound").as(scalar[String].singleOpt)
+   * }}}
+   */
+  def singleOpt: ResultSetParser[Option[A]] = ResultSetParser.singleOpt(parent)
+
+}
+
+object RowParser {
+  def apply[A](f: Row => SqlResult[A]): RowParser[A] = new RowParser[A] {
+    def apply(row: Row): SqlResult[A] = f(row)
+  }
+
+  /** Row parser that results in the unchanged row, as a success. */
+  object successful extends RowParser[Row] {
+    def apply(row: Row): SqlResult[Row] = Success(row)
+  }
+
+  def failed[A](error: => Error): RowParser[A] = new RowParser[A] {
+    def apply(row: Row): SqlResult[A] = error
+  }
+}
diff --git a/core/src/main/scala/anorm/SimpleSql.scala b/core/src/main/scala/anorm/SimpleSql.scala
index b1178996..fc462788 100644
--- a/core/src/main/scala/anorm/SimpleSql.scala
+++ b/core/src/main/scala/anorm/SimpleSql.scala
@@ -92,16 +92,7 @@ case class SimpleSql[T](
     stmt
   }
 
-  /**
-   * Prepares query with given row parser.
-   *
-   * {{{
-   * import anorm.{ SQL, SqlParser }
-   *
-   * val res = SQL("SELECT 1").using(SqlParser.scalar[Int])
-   * // See: SQL("SELECT 1").as(SqlParser.scalar[Int].single)
-   * }}}
-   */
+  /** Prepares query with given row parser.
*/ @deprecated(message = "Use [[as]]", since = "2.5.1") def using[U](p: RowParser[U]): SimpleSql[U] = copy(sql, params, p) diff --git a/core/src/main/scala/anorm/SqlParser.scala b/core/src/main/scala/anorm/SqlParser.scala index 57572b4c..1b5d42e4 100644 --- a/core/src/main/scala/anorm/SqlParser.scala +++ b/core/src/main/scala/anorm/SqlParser.scala @@ -13,7 +13,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Returns a parser for a scalar not-null value. * * {{{ - * import anorm.{ SQL, SqlParser }, SqlParser.scalar + * import anorm._, SqlParser.scalar * * def count(implicit con: java.sql.Connection) = * SQL("select count(*) from Country").as(scalar[Long].single) @@ -40,7 +40,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Returns a parser that fold over the row. * * {{{ - * import anorm.{ RowParser, SqlParser } + * import anorm._ * * def p(implicit con: java.sql.Connection): RowParser[List[(Any, String)]] = * SqlParser.folder(List.empty[(Any, String)]) { (ls, v, m) => @@ -67,7 +67,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Flatten columns tuple-like. * * {{{ - * import anorm.{ SQL, SqlParser }, SqlParser.{ long, str, int } + * import anorm._, SqlParser.{ long, str, int } * * def tuple(implicit con: java.sql.Connection): (Long, String, Int) = * SQL("SELECT a, b, c FROM Test"). @@ -110,7 +110,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as float. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Float, String) = * SQL("SELECT a, b FROM test").as( @@ -126,7 +126,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Float, String) = * SQL("SELECT a, b FROM test").as( @@ -141,7 +141,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as string. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Float, String) = * SQL("SELECT a, b FROM test").as( @@ -157,7 +157,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Float, String) = * SQL("SELECT a, b FROM test").as( @@ -173,7 +173,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * * {{{ * import java.io.InputStream - * import anorm.{ SQL, SqlParser } + * import anorm._ * * val parser = (SqlParser.str("name") ~ SqlParser.binaryStream("data")). * map(SqlParser.flatten) @@ -190,7 +190,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * * {{{ * import java.io.InputStream - * import anorm.{ SQL, SqlParser } + * import anorm._ * * val parser = * (SqlParser.str(1) ~ SqlParser.binaryStream(2)).map(SqlParser.flatten) @@ -206,7 +206,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as boolean. 
* * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Boolean, String) = * SQL("SELECT a, b FROM test").as( @@ -221,7 +221,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Boolean, String) = * SQL("SELECT a, b FROM test").as( @@ -235,7 +235,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as byte. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Byte, String) = * SQL("SELECT a, b FROM test").as( @@ -251,7 +251,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Byte, String) = * SQL("SELECT a, b FROM test").as( @@ -266,7 +266,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as binary stream. * * {{{ - * import anorm.{ SQL, SqlParser }, SqlParser.{ str, byteArray } + * import anorm._, SqlParser.{ str, byteArray } * * val parser = (str("name") ~ byteArray("data")).map(SqlParser.flatten) * @@ -281,7 +281,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as binary stream. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * val parser = * (SqlParser.str(1) ~ SqlParser.byteArray(2)).map(SqlParser.flatten) @@ -297,7 +297,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as double. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Double, String) = * SQL("SELECT a, b FROM test").as( @@ -312,7 +312,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Double, String) = * SQL("SELECT a, b FROM test").as( @@ -326,7 +326,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as short. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Short, String) = * SQL("SELECT a, b FROM test").as( @@ -342,7 +342,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Short, String) = * SQL("SELECT a, b FROM test").as( @@ -357,7 +357,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as integer. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Int, String) = * SQL("SELECT a, b FROM test").as( @@ -373,7 +373,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Int, String) = * SQL("SELECT a, b FROM test") @@ -388,7 +388,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified array column as list. 
* * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (String, List[String]) = * SQL("SELECT a, sqlArrayOfString FROM test") @@ -403,7 +403,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (String, List[String]) = * SQL("SELECT a, sqlArrayOfString FROM test") @@ -417,7 +417,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as long. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Long, String) = * SQL("SELECT a, b FROM test").as( @@ -433,7 +433,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (Long, String) = * SQL("SELECT a, b FROM test").as( @@ -448,7 +448,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * Parses specified column as date. * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (java.util.Date, String) = * SQL("SELECT a, b FROM test").as( @@ -464,7 +464,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param columnPosition from 1 to n * * {{{ - * import anorm.{ SQL, SqlParser } + * import anorm._ * * def t(implicit con: java.sql.Connection): (java.util.Date, String) = * SQL("SELECT a, b FROM test").as( @@ -480,8 +480,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * @param name Column name * * {{{ - * import anorm.SQL - * import anorm.SqlParser.get + * import anorm._, SqlParser.get * * def title(implicit con: java.sql.Connection): String = * SQL("SELECT title FROM Books").as(get[String]("title").single) @@ -524,8 +523,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { * and matching expected `value`. * * {{{ - * import anorm.SQL - * import anorm.SqlParser.matches + * import anorm._, SqlParser.matches * * def m(implicit con: java.sql.Connection): Boolean = * SQL("SELECT * FROM table").as(matches("a", 1.2f).single) @@ -551,7 +549,7 @@ object SqlParser extends FunctionAdapter with DeprecatedSqlParser { } @deprecated("Do not use these combinators", "2.5.4") -sealed trait DeprecatedSqlParser { _: SqlParser.type => +sealed trait DeprecatedSqlParser { _parser: SqlParser.type => @deprecated("Use `matches[T]`", "2.5.4") @SuppressWarnings(Array("AsInstanceOf")) @@ -567,185 +565,6 @@ sealed trait DeprecatedSqlParser { _: SqlParser.type => @SuppressWarnings(Array("ClassNames")) final case class ~[+A, +B](_1: A, _2: B) -object RowParser { - def apply[A](f: Row => SqlResult[A]): RowParser[A] = new RowParser[A] { - def apply(row: Row): SqlResult[A] = f(row) - } - - /** Row parser that result in successfully unchanged row. */ - object successful extends RowParser[Row] { - def apply(row: Row): SqlResult[Row] = Success(row) - } - - def failed[A](error: => Error): RowParser[A] = new RowParser[A] { - def apply(row: Row): SqlResult[A] = error - } -} - -trait RowParser[+A] extends (Row => SqlResult[A]) { parent => - - /** - * Returns a parser that will apply given function `f` - * to the result of this first parser. If the current parser is not - * successful, the new one will return encountered [[Error]]. 
- * - * @param f Function applied on the successful parser result - * - * {{{ - * import anorm.{ RowParser, SqlParser } - * - * val parser: RowParser[Int] = SqlParser.str("col").map(_.length) - * // Prepares a parser that first get 'col' string value, - * // and then returns the length of that - * }}} - */ - def map[B](f: A => B): RowParser[B] = RowParser(parent.andThen(_.map(f))) - - /** - * Returns parser which collects information - * from already parsed row data using `f`. - * - * @param otherwise Message returned as error if nothing can be collected using `f`. - * @param f Collecting function - */ - def collect[B](otherwise: String)(f: PartialFunction[A, B]): RowParser[B] = - RowParser(parent(_).flatMap(f.lift(_).fold[SqlResult[B]](Error(SqlMappingError(otherwise)))(Success(_)))) - - def flatMap[B](k: A => RowParser[B]): RowParser[B] = - RowParser(row => parent(row).flatMap(k(_)(row))) - - /** - * Combines this parser on the left of the parser `p` given as argument. - * - * @param p Parser on the right - * - * {{{ - * import anorm._, SqlParser.{ int, str } - * - * def populations(implicit con: java.sql.Connection): List[String ~ Int] = - * SQL("SELECT * FROM Country").as((str("name") ~ int("population")).*) - * }}} - */ - def ~[B](p: RowParser[B]): RowParser[A ~ B] = - RowParser(row => parent(row).flatMap(a => p(row).map(new ~(a, _)))) - - /** - * Combines this current parser with the one given as argument `p`, - * if and only if the current parser can first/on left side successfully - * parse a row, without keeping these values in parsed result. - * - * {{{ - * import anorm._, SqlParser.{ int, str } - * - * def string(implicit con: java.sql.Connection) = SQL("SELECT * FROM test"). - * as((int("id") ~> str("val")).single) - * // row has to have an int column 'id' and a string 'val' one, - * // keeping only 'val' in result - * }}} - */ - def ~>[B](p: RowParser[B]): RowParser[B] = - RowParser(row => parent(row).flatMap(_ => p(row))) - - /** - * Combines this current parser with the one given as argument `p`, - * if and only if the current parser can first successfully - * parse a row, without keeping the values of the parser `p`. - * - * {{{ - * import anorm._, SqlParser.{ int, str } - * - * def i(implicit con: java.sql.Connection) = SQL("SELECT * FROM test"). - * as((int("id") <~ str("val")).single) - * // row has to have an int column 'id' and a string 'val' one, - * // keeping only 'id' in result - * }}} - */ - def <~[B](p: RowParser[B]): RowParser[A] = parent.~(p).map(_._1) - - // TODO: Scaladoc - def |[B >: A](p: RowParser[B]): RowParser[B] = RowParser { row => - parent(row) match { - case Error(_) => p(row) - case a => a - } - } - - /** - * Returns a row parser for optional column, - * that will turn missing or null column as None. - */ - def ? : RowParser[Option[A]] = RowParser { - parent(_) match { - case Success(a) => Success(Some(a)) - case Error(ColumnNotFound(_, _)) => - Success(None) - - case e @ Error(_) => e - } - } - - /** Alias for [[flatMap]] */ - def >>[B](f: A => RowParser[B]): RowParser[B] = flatMap(f) - - /** - * Returns possibly empty list parsed from result. - * - * {{{ - * import anorm._, SqlParser.scalar - * - * val price = 125 - * - * def foo(implicit con: java.sql.Connection) = - * SQL"SELECT name FROM item WHERE price < \\$price".as(scalar[String].*) - * }}} - */ - def * : ResultSetParser[List[A]] = ResultSetParser.list(parent) - - /** - * Returns non empty list parse from result, - * or raise error if there is no result. 
- * - * {{{ - * import anorm._, SqlParser.str - * - * def foo(implicit con: java.sql.Connection) = { - * val parser = str("title") ~ str("descr") - * SQL("SELECT title, descr FROM pages").as(parser.+) // at least 1 page - * } - * }}} - */ - def + : ResultSetParser[List[A]] = ResultSetParser.nonEmptyList(parent) - - /** - * Returns a result set parser expecting exactly one row to parse. - * - * {{{ - * import anorm._, SqlParser.scalar - * - * def b(implicit con: java.sql.Connection): Boolean = - * SQL("SELECT flag FROM Test WHERE id = :id"). - * on("id" -> 1).as(scalar[Boolean].single) - * }}} - * - * @see #singleOpt - */ - def single = ResultSetParser.single(parent) - - /** - * Returns a result set parser for none or one parsed row. - * - * {{{ - * import anorm._, SqlParser.scalar - * - * def name(implicit con: java.sql.Connection): Option[String] = - * SQL("SELECT name FROM Country WHERE lang = :lang") - * .on("lang" -> "notFound").as(scalar[String].singleOpt) - * }}} - */ - def singleOpt: ResultSetParser[Option[A]] = ResultSetParser.singleOpt(parent) - -} - /** Parser for scalar row (row of one single column). */ sealed trait ScalarRowParser[+A] extends RowParser[A] { override def singleOpt: ResultSetParser[Option[A]] = ResultSetParser { diff --git a/core/src/main/scala/anorm/SqlQueryResult.scala b/core/src/main/scala/anorm/SqlQueryResult.scala index 8cb20a75..9e8ffece 100644 --- a/core/src/main/scala/anorm/SqlQueryResult.scala +++ b/core/src/main/scala/anorm/SqlQueryResult.scala @@ -11,8 +11,10 @@ import resource.ManagedResource * @constructor create a result with a result set * @param resultSet Result set from executed query */ -final case class SqlQueryResult(resultSet: ManagedResource[java.sql.ResultSet], resultSetOnFirstRow: Boolean = false) - extends WithResult { +final case class SqlQueryResult( + resultSet: ManagedResource[java.sql.ResultSet], + resultSetOnFirstRow: Boolean = false +) extends WithResult { protected def resultSet(c: Connection) = resultSet @@ -41,7 +43,9 @@ final case class SqlQueryResult(resultSet: ManagedResource[java.sql.ResultSet], * } * }}} */ - def statementWarning: Option[SQLWarning] = - statement.acquireFor(_.getWarnings).fold[Option[SQLWarning]](_.headOption.map(new SQLWarning(_)), Option(_)) + def statementWarning: Option[SQLWarning] = { + import resource.extractedEitherToEither + statement.acquireFor(_.getWarnings).fold[Option[SQLWarning]](_.headOption.map(new SQLWarning(_)), Option(_)) + } } diff --git a/core/src/main/scala/anorm/SqlRequestError.scala b/core/src/main/scala/anorm/SqlRequestError.scala index fbb516c1..d9300fbe 100644 --- a/core/src/main/scala/anorm/SqlRequestError.scala +++ b/core/src/main/scala/anorm/SqlRequestError.scala @@ -28,8 +28,11 @@ object SqlRequestError { * @param column the name of the not found column * @param available the names of the available columns */ -case class ColumnNotFound(column: String, @deprecatedName(Symbol("possibilities")) available: Seq[String]) - extends SqlRequestError { +case class ColumnNotFound( + column: String, + @deprecatedName(Symbol("possibilities")) available: Seq[String] +) extends SqlRequestError { + @deprecated("Use constructor with `available` sequence", "2.5.4") def this(column: String, possibilities: List[String]) = this(column, available = possibilities.toSeq) @@ -40,10 +43,6 @@ case class ColumnNotFound(column: String, @deprecatedName(Symbol("possibilities" @deprecated("Use `available`", "2.5.4") def possibilities = available.toList - @deprecated("Use copy with `available`", "2.5.4") - 
def copy(column: String = this.column, possibilities: List[String] = this.possibilities): ColumnNotFound = - ColumnNotFound(column, possibilities.toSeq) - override lazy val toString = message } diff --git a/core/src/main/scala/anorm/SqlResult.scala b/core/src/main/scala/anorm/SqlResult.scala index d22cde7c..d56cf80a 100644 --- a/core/src/main/scala/anorm/SqlResult.scala +++ b/core/src/main/scala/anorm/SqlResult.scala @@ -14,6 +14,20 @@ sealed trait SqlResult[+A] { self => case e @ Error(_) => e } + def collect[B](f: PartialFunction[A, B]): SqlResult[B] = self match { + case Success(a) => + f.lift(a) match { + case Some(b) => + Success(b) + + case None => + Error(SqlMappingError(s"Value ${a} is not matching")) + } + + case Error(cause) => + Error(cause) + } + /** * Either applies function `e` if result is erroneous, * or function `f` with successful result if any. @@ -174,8 +188,11 @@ private[anorm] trait WithResult { */ def withResult[T](op: Option[Cursor] => T, aliaser: ColumnAliaser)(implicit connection: Connection - ): Either[List[Throwable], T] = + ): Either[List[Throwable], T] = { + import resource.extractedEitherToEither + Sql.withResult(resultSet(connection), resultSetOnFirstRow, aliaser)(op).acquireFor(identity) + } /** * Converts this query result as `T`, using parser. diff --git a/core/src/main/scala/anorm/SqlStatementParser.scala b/core/src/main/scala/anorm/SqlStatementParser.scala index b4ab96cc..0eae799b 100644 --- a/core/src/main/scala/anorm/SqlStatementParser.scala +++ b/core/src/main/scala/anorm/SqlStatementParser.scala @@ -89,8 +89,9 @@ object SqlStatementParser extends JavaTokenParsers { } private val variable: Parser[TokenGroup] = - "{" ~> (ident ~ (("." ~> ident) ?)) <~ "}" ^^ { case i1 ~ i2 => - TokenGroup(Nil, Some(i1 + i2.fold("")("." + _))) + "{" ~> (ident ~ (("." ~> ident) ?)) <~ "}" ^^ { + case i1 ~ i2 => + TokenGroup(Nil, Some(i1 + i2.fold("")("." + _))) } private val reserved: Parser[PercentToken.type] = diff --git a/core/src/main/scala/anorm/ToSql.scala b/core/src/main/scala/anorm/ToSql.scala index 8dffbb92..f04cd39a 100644 --- a/core/src/main/scala/anorm/ToSql.scala +++ b/core/src/main/scala/anorm/ToSql.scala @@ -85,20 +85,21 @@ object ToSql { traversableToSql[A, Vector[A]] /** Returns fragment for each value, with custom formatting. */ - implicit def seqParamToSql[A](implicit conv: ToSql[A] = ToSql.missing[A]) = + implicit def seqParamToSql[A](implicit conv: ToSql[A] = ToSql.missing[A]): ToSql[SeqParameter[A]] = ToSql[SeqParameter[A]] { p => val before = p.before.getOrElse("") val after = p.after.getOrElse("") val c: A => (String, Int) = if (conv == null) _ => "?" -> 1 else conv.fragment - val sql = p.values.foldLeft(new StringBuilder() -> 0) { case ((sb, i), v) => - val frag = c(v) - val st = - if (i > 0) sb ++= p.separator ++= before ++= frag._1 - else sb ++= before ++= frag._1 + val sql = p.values.foldLeft(new StringBuilder() -> 0) { + case ((sb, i), v) => + val frag = c(v) + val st = + if (i > 0) sb ++= p.separator ++= before ++= frag._1 + else sb ++= before ++= frag._1 - (st ++= after, i + frag._2) + (st ++= after, i + frag._2) } sql._1.toString -> sql._2 @@ -108,11 +109,12 @@ object ToSql { val c: A => (String, Int) = if (conv == null) _ => "?" 
-> 1 else conv.fragment - val sql = values.foldLeft(new StringBuilder() -> 0) { case ((sb, i), v) => - val frag = c(v) - val st = if (i > 0) sb ++= ", " ++= frag._1 else sb ++= frag._1 + val sql = values.foldLeft(new StringBuilder() -> 0) { + case ((sb, i), v) => + val frag = c(v) + val st = if (i > 0) sb ++= ", " ++= frag._1 else sb ++= frag._1 - (st, i + frag._2) + (st, i + frag._2) } sql._1.toString -> sql._2 diff --git a/core/src/main/scala/anorm/ToStatementMisc.scala b/core/src/main/scala/anorm/ToStatementMisc.scala index 8cd6aae6..2746104b 100644 --- a/core/src/main/scala/anorm/ToStatementMisc.scala +++ b/core/src/main/scala/anorm/ToStatementMisc.scala @@ -32,8 +32,11 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `LONGVARBINARY` is called on statement. * * {{{ + * // skip-doc-5f98a5e + * import anorm._ + * * def foo(inputStream: java.io.InputStream) = - * anorm.SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> inputStream) + * SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> inputStream) * }}} */ implicit def binaryStreamToStatement[S <: InputStream]: ToStatement[S] = @@ -49,11 +52,14 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `BLOB` is called on statement. * * {{{ + * // skip-doc-5f98a5e + * import anorm._ + * * def foo(byteArray: Array[Byte])(implicit con: java.sql.Connection) = { * val blob = con.createBlob() * blob.setBytes(1, byteArray) * - * anorm.SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> blob) + * SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> blob) * } * }}} */ @@ -70,8 +76,11 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `VARCHAR` is called on statement. * * {{{ + * // skip-doc-5f98a5e + * import anorm._ + * * def foo(reader: java.io.Reader) = - * anorm.SQL("INSERT INTO Table(chars) VALUES {c}").on("c" -> reader) + * SQL("INSERT INTO Table(chars) VALUES {c}").on("c" -> reader) * }}} */ implicit def characterStreamToStatement[R <: Reader]: ToStatement[R] = @@ -86,7 +95,9 @@ sealed trait ToStatementPriority0 { * Sets boolean value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE enabled = {b}").on('b -> true) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE enabled = {b}").on("b" -> true) * }}} */ implicit object booleanToStatement extends ToStatement[Boolean] { @@ -98,8 +109,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `BOOLEAN` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE enabled = {b}"). - * on('b -> java.lang.Boolean.TRUE) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE enabled = {b}"). + * on("b" -> java.lang.Boolean.TRUE) * }}} */ implicit object javaBooleanToStatement extends ToStatement[JBool] { @@ -112,7 +125,9 @@ sealed trait ToStatementPriority0 { * Sets byte value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1.toByte) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1.toByte) * }}} */ implicit object byteToStatement extends ToStatement[Byte] { @@ -124,8 +139,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `TINYINT` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). - * on('b -> new java.lang.Byte(1: Byte)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). 
+ * on("b" -> new java.lang.Byte(1: Byte)) * }}} */ implicit object javaByteToStatement extends ToStatement[JByte] { @@ -138,7 +155,9 @@ sealed trait ToStatementPriority0 { * Sets double value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1D) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1D) * }}} */ implicit object doubleToStatement extends ToStatement[Double] { @@ -150,8 +169,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `DOUBLE` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). - * on('b -> new java.lang.Double(1D)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). + * on("b" -> new java.lang.Double(1D)) * }}} */ implicit object javaDoubleToStatement extends ToStatement[JDouble] { @@ -164,7 +185,9 @@ sealed trait ToStatementPriority0 { * Sets float value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1F) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1F) * }}} */ implicit object floatToStatement extends ToStatement[Float] { @@ -176,8 +199,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `FLOAT` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). - * on('b -> new java.lang.Float(1F)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). + * on("b" -> new java.lang.Float(1F)) * }}} */ implicit object javaFloatToStatement extends ToStatement[JFloat] { @@ -190,7 +215,9 @@ sealed trait ToStatementPriority0 { * Sets long value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1L) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1L) * }}} */ implicit object longToStatement extends ToStatement[Long] { @@ -202,8 +229,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `BIGINT` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). - * on('b -> new java.lang.Long(1L)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). + * on("b" -> new java.lang.Long(1L)) * }}} */ implicit object javaLongToStatement extends ToStatement[JLong] { @@ -216,7 +245,9 @@ sealed trait ToStatementPriority0 { * Sets integer value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1) * }}} */ implicit object intToStatement extends ToStatement[Int] { @@ -228,8 +259,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `INTEGER` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). - * on('b -> new java.lang.Integer(1)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). + * on("b" -> new java.lang.Integer(1)) * }}} */ implicit object integerToStatement extends ToStatement[Integer] { @@ -242,7 +275,9 @@ sealed trait ToStatementPriority0 { * Sets short value on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}").on('b -> 1.toShort) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}").on("b" -> 1.toShort) * }}} */ implicit object shortToStatement extends ToStatement[Short] { @@ -254,8 +289,10 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `SMALLINT` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE flag = {b}"). 
- * on('b -> new java.lang.Short(1.toShort)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE flag = {b}"). + * on("b" -> new java.lang.Short(1.toShort)) * }}} */ implicit object javaShortToStatement extends ToStatement[JShort] { @@ -269,7 +306,9 @@ sealed trait ToStatementPriority0 { * For `null` character, `setNull` with `VARCHAR` is called on statement. * * {{{ - * anorm.SQL("SELECT * FROM tbl WHERE flag = {c}"). + * import anorm._ + * + * SQL("SELECT * FROM tbl WHERE flag = {c}"). * on("c" -> new java.lang.Character('f')) * }}} */ @@ -284,7 +323,9 @@ sealed trait ToStatementPriority0 { * Sets character as parameter value. * * {{{ - * anorm.SQL("SELECT * FROM tbl WHERE flag = {c}").on("c" -> 'f') + * import anorm._ + * + * SQL("SELECT * FROM tbl WHERE flag = {c}").on("c" -> 'f') * }}} */ implicit object charToStatement extends ToStatement[Char] { @@ -297,7 +338,9 @@ sealed trait ToStatementPriority0 { * Value `null` is accepted. * * {{{ - * anorm.SQL("SELECT * FROM tbl WHERE name = {n}").on("n" -> "str") + * import anorm._ + * + * SQL("SELECT * FROM tbl WHERE name = {n}").on("n" -> "str") * }}} */ implicit object stringToStatement extends ToStatement[String] { @@ -308,11 +351,13 @@ sealed trait ToStatementPriority0 { * Sets null for None value. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE category = {c}").on('c -> None) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE category = {c}").on("c" -> None) * * // Rather use: * anorm.SQL("SELECT * FROM Test WHERE category = {c}"). - * on('c -> Option.empty[String]) // Not deprecated + * on("c" -> Option.empty[String]) // Not deprecated * }}} */ @deprecated("Parameter value should be passed using `Option.empty[T]`", since = "2.3.7") @@ -325,10 +370,12 @@ sealed trait ToStatementPriority0 { * Sets not empty optional A inferred as Some[A]. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE category = {c}").on('c -> Some("cat")) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE category = {c}").on("c" -> Some("cat")) * }}} */ - implicit def someToStatement[A](implicit c: ToStatement[A]) = + implicit def someToStatement[A](implicit c: ToStatement[A]): ToStatement[Some[A]] = new ToStatement[Some[A]] with NotNullGuard { def set(s: PreparedStatement, index: Int, v: Some[A]): Unit = c.set(s, index, v.get) @@ -340,24 +387,26 @@ sealed trait ToStatementPriority0 { * {{{ * import anorm._ * - * SQL("SELECT * FROM Test WHERE category = {c}").on('c -> Option("cat")) + * SQL("SELECT * FROM Test WHERE category = {c}").on("c" -> Option("cat")) * SQL"SELECT * FROM Test WHERE nullable_int = \\${Option.empty[Int]}" * }}} */ - implicit def optionToStatement[A](implicit c: ToStatement[A], meta: ParameterMetaData[A]) = new ToStatement[Option[A]] - with NotNullGuard { - def set(s: PreparedStatement, index: Int, o: Option[A]) = { - o.fold[Unit](s.setNull(index, meta.jdbcType))(c.set(s, index, _)) + implicit def optionToStatement[A](implicit c: ToStatement[A], meta: ParameterMetaData[A]): ToStatement[Option[A]] = + new ToStatement[Option[A]] with NotNullGuard { + def set(s: PreparedStatement, index: Int, o: Option[A]) = { + o.fold[Unit](s.setNull(index, meta.jdbcType))(c.set(s, index, _)) + } } - } /** * Sets Java big integer on statement. * For `null` value, `setNull` with `NUMERIC` is called on statement. * * {{{ - * anorm.SQL("UPDATE tbl SET max = {m}"). - * on('m -> new java.math.BigInteger("15")) + * import anorm._ + * + * SQL("UPDATE tbl SET max = {m}"). 
+ * on("m" -> new java.math.BigInteger("15")) * }}} */ implicit object javaBigIntegerToStatement extends ToStatement[BigInteger] { @@ -372,7 +421,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `NUMERIC` is called on statement. * * {{{ - * anorm.SQL("UPDATE tbl SET max = {m}").on('m -> BigInt(15)) + * import anorm._ + * + * SQL("UPDATE tbl SET max = {m}").on("m" -> BigInt(15)) * }}} */ implicit object scalaBigIntegerToStatement extends ToStatement[BigInt] { @@ -387,8 +438,10 @@ sealed trait ToStatementPriority0 { * Value `null` is accepted. * * {{{ - * anorm.SQL("UPDATE tbl SET max = {m}"). - * on('m -> new java.math.BigDecimal(10.02F)) + * import anorm._ + * + * SQL("UPDATE tbl SET max = {m}"). + * on("m" -> new java.math.BigDecimal(10.02F)) * }}} */ implicit object javaBigDecimalToStatement extends ToStatement[JBigDec] { @@ -401,7 +454,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `DECIMAL` is called on statement. * * {{{ - * anorm.SQL("UPDATE tbl SET max = {m}").on('m -> BigDecimal(10.02F)) + * import anorm._ + * + * SQL("UPDATE tbl SET max = {m}").on("m" -> BigDecimal(10.02F)) * }}} */ implicit object scalaBigDecimalToStatement extends ToStatement[BigDecimal] { @@ -416,9 +471,11 @@ sealed trait ToStatementPriority0 { * Value `null` is accepted. * * {{{ + * import anorm._ + * * def foo(date: java.util.Date) = - * anorm.SQL("UPDATE tbl SET modified = {ts}"). - * on('ts -> new java.sql.Timestamp(date.getTime)) + * SQL("UPDATE tbl SET modified = {ts}"). + * on("ts" -> new java.sql.Timestamp(date.getTime)) * }}} */ implicit object timestampToStatement extends ToStatement[Timestamp] { @@ -431,7 +488,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `TIMESTAMP` is called on statement. * * {{{ - * anorm.SQL("UPDATE tbl SET modified = {d}").on('d -> new java.util.Date()) + * import anorm._ + * + * SQL("UPDATE tbl SET modified = {d}").on("d" -> new java.util.Date()) * }}} */ implicit object dateToStatement extends ToStatement[java.util.Date] { @@ -447,12 +506,15 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `TIMESTAMP` is called on statement. * * {{{ + * // skip-doc-5f98a5e + * import anorm._ + * * val wrapper = new { * // Any value with a `.getTimestamp` * val getTimestamp = new java.sql.Timestamp(123L) * } * - * anorm.SQL("UPDATE tbl SET modified = {ts}").on('ts -> wrapper) + * SQL("UPDATE tbl SET modified = {ts}").on("ts" -> wrapper) * }}} */ implicit def timestampWrapper1ToStatement[T <: TimestampWrapper1]: ToStatement[T] = new ToStatement[T] { @@ -472,7 +534,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `VARCHAR` is called on statement. * * {{{ - * anorm.SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). + * import anorm._ + * + * SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). * on("i" -> java.util.UUID.randomUUID(), "n" -> "lang") * }}} */ @@ -488,7 +552,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `VARCHAR` is called on statement. * * {{{ - * anorm.SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). + * import anorm._ + * + * SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). * on("i" -> new java.net.URI("https://github.com/playframework/"), * "n" -> "lang") * }}} @@ -505,7 +571,9 @@ sealed trait ToStatementPriority0 { * For `null` value, `setNull` with `VARCHAR` is called on statement. * * {{{ - * anorm.SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). 
+ * import anorm._ + * + * SQL("INSERT INTO lang_tbl(id, name) VALUE ({i}, {n})"). * on("i" -> new java.net.URL("https://github.com/playframework/"), * "n" -> "lang") * }}} @@ -522,8 +590,10 @@ sealed trait ToStatementPriority0 { * UNSAFE: It's set using `java.sql.PreparedStatement.setObject`. * * {{{ - * anorm.SQL("EXEC indexed_at {d}"). - * on('d -> anorm.Object(new java.util.Date())) + * import anorm._ + * + * SQL("EXEC indexed_at {d}"). + * on("d" -> anorm.Object(new java.util.Date())) * }}} */ implicit object objectToStatement extends ToStatement[anorm.Object] { @@ -535,8 +605,10 @@ sealed trait ToStatementPriority0 { * Sets multi-value parameter from list on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE cat IN ({categories})"). - * on('categories -> List(1, 3, 4)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE cat IN ({categories})"). + * on("categories" -> List(1, 3, 4)) * }}} */ implicit def listToStatement[A](implicit c: ToStatement[A]): ToStatement[List[A]] = traversableToStatement[A, List[A]] @@ -545,8 +617,10 @@ sealed trait ToStatementPriority0 { * Sets multi-value parameter from sequence on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE cat IN ({categories})"). - * on('categories -> Seq("a", "b", "c")) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE cat IN ({categories})"). + * on("categories" -> Seq("a", "b", "c")) * }}} */ implicit def seqToStatement[A](implicit c: ToStatement[A]): ToStatement[Seq[A]] = traversableToStatement[A, Seq[A]] @@ -555,8 +629,10 @@ sealed trait ToStatementPriority0 { * Sets multi-value parameter from set on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE cat IN ({categories})"). - * on('categories -> Set(1, 3, 4)) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE cat IN ({categories})"). + * on("categories" -> Set(1, 3, 4)) * }}} */ implicit def setToStatement[A](implicit c: ToStatement[A]): ToStatement[Set[A]] = traversableToStatement[A, Set[A]] @@ -567,8 +643,10 @@ sealed trait ToStatementPriority0 { * {{{ * import scala.collection.immutable.SortedSet * - * anorm.SQL("SELECT * FROM Test WHERE cat IN ({categories})"). - * on('categories -> SortedSet("a", "b", "c")) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE cat IN ({categories})"). + * on("categories" -> SortedSet("a", "b", "c")) * }}} */ implicit def sortedSetToStatement[A](implicit c: ToStatement[A]): ToStatement[SortedSet[A]] = @@ -584,8 +662,10 @@ sealed trait ToStatementPriority0 { * Sets multi-value parameter from vector on statement. * * {{{ - * anorm.SQL("SELECT * FROM Test WHERE cat IN ({categories})"). - * on('categories -> Vector("a", "b", "c")) + * import anorm._ + * + * SQL("SELECT * FROM Test WHERE cat IN ({categories})"). + * on("categories" -> Vector("a", "b", "c")) * }}} */ implicit def vectorToStatement[A](implicit c: ToStatement[A]): ToStatement[Vector[A]] = @@ -596,16 +676,16 @@ sealed trait ToStatementPriority0 { * (using [[SeqParameter]]). * * {{{ - * import anorm.SeqParameter + * import anorm._ * - * anorm.SQL("SELECT * FROM Test t WHERE {categories}"). - * on('categories -> SeqParameter( + * SQL("SELECT * FROM Test t WHERE {categories}"). 
+ * on("categories" -> SeqParameter( * seq = Seq("a", "b", "c"), sep = " OR ", * pre = "EXISTS (SELECT NULL FROM j WHERE t.id=j.id AND name=", * post = ")")) * }}} */ - implicit def seqParamToStatement[A](implicit c: ToStatement[Seq[A]]) = + implicit def seqParamToStatement[A](implicit c: ToStatement[Seq[A]]): ToStatement[SeqParameter[A]] = new ToStatement[SeqParameter[A]] with NotNullGuard { def set(s: PreparedStatement, offset: Int, ps: SeqParameter[A]): Unit = c.set(s, offset, ps.values) @@ -625,11 +705,13 @@ sealed trait ToStatementPriority0 { * Sets an array parameter on statement (see `java.sql.Array`). * * {{{ - * anorm.SQL("INSERT INTO Table(arr) VALUES {a}"). + * import anorm._ + * + * SQL("INSERT INTO Table(arr) VALUES {a}"). * on("a" -> Array("A", "2", "C")) * }}} */ - implicit def arrayToParameter[A <: AnyRef](implicit m: ParameterMetaData[A]) = + implicit def arrayToParameter[A <: AnyRef](implicit m: ParameterMetaData[A]): ToStatement[Array[A]] = new ToStatement[Array[A]] with NotNullGuard { def set(s: PreparedStatement, i: Int, arr: Array[A]) = if (arr == (null: AnyRef)) throw new IllegalArgumentException() @@ -720,7 +802,7 @@ sealed trait JavaTimeToStatement { * import java.time.Instant * import anorm._ * - * SQL("SELECT * FROM Test WHERE time < {b}").on('b -> Instant.now) + * SQL("SELECT * FROM Test WHERE time < {b}").on("b" -> Instant.now) * }}} */ implicit def instantToStatement(implicit meta: ParameterMetaData[Instant]): ToStatement[Instant] = @@ -737,7 +819,7 @@ sealed trait JavaTimeToStatement { * import java.time.LocalDateTime * import anorm._ * - * SQL("SELECT * FROM Test WHERE time < {b}").on('b -> LocalDateTime.now) + * SQL("SELECT * FROM Test WHERE time < {b}").on("b" -> LocalDateTime.now) * }}} */ implicit def localDateTimeToStatement(implicit meta: ParameterMetaData[LocalDateTime]): ToStatement[LocalDateTime] = @@ -754,7 +836,7 @@ sealed trait JavaTimeToStatement { * import java.time.LocalDate * import anorm._ * - * SQL("SELECT * FROM Test WHERE time < {b}").on('b -> LocalDate.now) + * SQL("SELECT * FROM Test WHERE time < {b}").on("b" -> LocalDate.now) * }}} */ implicit def localDateToStatement(implicit meta: ParameterMetaData[LocalDate]): ToStatement[LocalDate] = @@ -771,7 +853,7 @@ sealed trait JavaTimeToStatement { * import java.time.ZonedDateTime * import anorm._ * - * SQL("SELECT * FROM Test WHERE time < {b}").on('b -> ZonedDateTime.now) + * SQL("SELECT * FROM Test WHERE time < {b}").on("b" -> ZonedDateTime.now) * }}} */ implicit def zonedDateTimeToStatement(implicit meta: ParameterMetaData[ZonedDateTime]): ToStatement[ZonedDateTime] = @@ -789,8 +871,10 @@ sealed trait ToStatementPriority1 extends ToStatementPriority0 { * For `null` value, `setNull` with `LONGVARBINARY` is called on statement. 
* * {{{ + * import anorm._ + * * def foo(arrayOfBytes: Array[Byte]) = - * anorm.SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> arrayOfBytes) + * SQL("INSERT INTO Table(bin) VALUES {b}").on("b" -> arrayOfBytes) * }}} */ implicit object byteArrayToStatement extends ToStatement[Array[Byte]] { diff --git a/core/src/main/scala/anorm/TupleFlattener.scala b/core/src/main/scala/anorm/TupleFlattener.scala index 614c78c5..619f12ea 100644 --- a/core/src/main/scala/anorm/TupleFlattener.scala +++ b/core/src/main/scala/anorm/TupleFlattener.scala @@ -65,8 +65,9 @@ sealed trait TupleFlattenerPriority4 extends TupleFlattenerPriority3 { * @param c5 Column #5 */ implicit def flattenerTo5[T1, T2, T3, T4, T5]: TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5) => (T1, T2, T3, T4, T5)] = - TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5) => (T1, T2, T3, T4, T5)] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5) => - (c1, c2, c3, c4, c5) + TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5) => (T1, T2, T3, T4, T5)] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5) => + (c1, c2, c3, c4, c5) } } @@ -86,8 +87,9 @@ sealed trait TupleFlattenerPriority5 extends TupleFlattenerPriority4 { */ implicit def flattenerTo6[T1, T2, T3, T4, T5, T6] : TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6) => (T1, T2, T3, T4, T5, T6)] = - TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6) => (T1, T2, T3, T4, T5, T6)] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6) => - (c1, c2, c3, c4, c5, c6) + TupleFlattener[(T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6) => (T1, T2, T3, T4, T5, T6)] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6) => + (c1, c2, c3, c4, c5, c6) } } @@ -241,8 +243,9 @@ sealed trait TupleFlattenerPriority11 extends TupleFlattenerPriority10 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12) } } @@ -276,8 +279,9 @@ sealed trait TupleFlattenerPriority12 extends TupleFlattenerPriority11 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13) } } @@ -312,8 +316,9 @@ sealed trait TupleFlattenerPriority13 extends TupleFlattenerPriority12 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) } } @@ -349,8 +354,9 @@ sealed trait TupleFlattenerPriority14 extends TupleFlattenerPriority13 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ~ T15 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ 
c11 ~ c12 ~ c13 ~ c14 ~ c15) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15) } } @@ -387,8 +393,9 @@ sealed trait TupleFlattenerPriority15 extends TupleFlattenerPriority14 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ~ T15 ~ T16 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16) } } @@ -427,8 +434,9 @@ sealed trait TupleFlattenerPriority16 extends TupleFlattenerPriority15 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ~ T15 ~ T16 ~ T17 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17) } } @@ -468,8 +476,9 @@ sealed trait TupleFlattenerPriority17 extends TupleFlattenerPriority16 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ~ T15 ~ T16 ~ T17 ~ T18 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17 ~ c18) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17, c18) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17 ~ c18) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17, c18) } } @@ -510,8 +519,9 @@ sealed trait TupleFlattenerPriority18 extends TupleFlattenerPriority17 { ( T1 ~ T2 ~ T3 ~ T4 ~ T5 ~ T6 ~ T7 ~ T8 ~ T9 ~ T10 ~ T11 ~ T12 ~ T13 ~ T14 ~ T15 ~ T16 ~ T17 ~ T18 ~ T19 ) => (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) - ] { case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17 ~ c18 ~ c19) => - (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17, c18, c19) + ] { + case (c1 ~ c2 ~ c3 ~ c4 ~ c5 ~ c6 ~ c7 ~ c8 ~ c9 ~ c10 ~ c11 ~ c12 ~ c13 ~ c14 ~ c15 ~ c16 ~ c17 ~ c18 ~ c19) => + (c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17, c18, c19) } } diff --git a/core/src/main/scala/anorm/package.scala b/core/src/main/scala/anorm/package.scala index d1b08e17..5d7e40e3 100644 --- a/core/src/main/scala/anorm/package.scala +++ b/core/src/main/scala/anorm/package.scala @@ -4,7 +4,10 @@ import java.util.StringTokenizer -import java.sql.SQLException +import java.lang.reflect.InvocationTargetException +import java.sql.{ PreparedStatement, ResultSet, SQLException } + +import scala.reflect.ClassTag /** * Anorm API @@ -17,8 +20,7 @@ import java.sql.SQLException * SQL("Select 1") * }}} */ -package object anorm { - import scala.language.implicitConversions +package object anorm extends PackageCompat { /** Structural type for timestamp wrapper. 
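+ * Any value with such a `getTimestamp` accessor can be passed as a
+ * timestamp parameter; a sketch (the anonymous wrapper below is only
+ * illustrative, mirroring the `ToStatement` example):
+ *
+ * {{{
+ * val wrapper = new {
+ *   val getTimestamp = new java.sql.Timestamp(123L)
+ * }
+ * }}}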
*/ type TimestampWrapper1 = { def getTimestamp: java.sql.Timestamp } @@ -30,7 +32,18 @@ package object anorm { def unapply(that: Any): Option[java.sql.Timestamp] = try { Some(that.asInstanceOf[TimestampWrapper1].getTimestamp) } catch { - case _: NoSuchMethodException => None + case _: NoSuchMethodException => + None + + case ie: InvocationTargetException => { + val cause = ie.getCause + + if (cause != null) { + throw cause + } + + throw ie + } } } @@ -46,6 +59,16 @@ package object anorm { } catch { case _: NoSuchMethodException => None case _: SQLException => None + + case ie: InvocationTargetException => { + val cause = ie.getCause + + if (cause != null) { + throw cause + } + + throw ie + } } } @@ -61,13 +84,19 @@ package object anorm { } catch { case _: NoSuchMethodException => None case _: SQLException => None + + case ie: InvocationTargetException => { + val cause = ie.getCause + + if (cause != null) { + throw cause + } + + throw ie + } } } - // TODO: Review implicit usage there - // (add explicit functions on SqlQuery?) - implicit def sqlToSimple(sql: SqlQuery): SimpleSql[Row] = sql.asSimple - /** * Creates an SQL query with given statement. * @param stmt SQL statement @@ -179,8 +208,9 @@ package object anorm { val groups = (gs match { case TokenGroup(List(StringToken("")), None) :: tgs => tgs // trim end case _ => gs - }).collect { case TokenGroup(pr, pl) => - TokenGroup(pr.reverse, pl) + }).collect { + case TokenGroup(pr, pl) => + TokenGroup(pr.reverse, pl) }.reverse TokenizedStatement(groups, ns.reverse) -> m @@ -188,22 +218,26 @@ package object anorm { } // Optimized resource typeclass not using reflection - object StatementResource extends resource.Resource[java.sql.PreparedStatement] { - - def close(stmt: java.sql.PreparedStatement) = stmt.close() + object StatementResource extends resource.Resource[PreparedStatement] { + def close(stmt: PreparedStatement) = stmt.close() @deprecated("Deprecated by Scala-ARM upgrade", "2.5.4") def fatalExceptions = Seq[Class[_]](classOf[Exception]) } + private[anorm] lazy val statementClassTag = + implicitly[ClassTag[PreparedStatement]] + // Optimized resource typeclass not using reflection - object ResultSetResource extends resource.Resource[java.sql.ResultSet] { - def close(rs: java.sql.ResultSet) = rs.close() + object ResultSetResource extends resource.Resource[ResultSet] { + def close(rs: ResultSet) = rs.close() @deprecated("Deprecated by Scala-ARM upgrade", "2.5.4") def fatalExceptions = Seq[Class[_]](classOf[Exception]) } + private[anorm] lazy val resultSetClassTag = implicitly[ClassTag[ResultSet]] + /** Activable features */ object features { diff --git a/core/src/test/scala-2.12+/anorm/ParameterSpec.scala b/core/src/test/scala-2.12+/anorm/Parameter212Spec.scala similarity index 51% rename from core/src/test/scala-2.12+/anorm/ParameterSpec.scala rename to core/src/test/scala-2.12+/anorm/Parameter212Spec.scala index 8ef745da..102f5f45 100644 --- a/core/src/test/scala-2.12+/anorm/ParameterSpec.scala +++ b/core/src/test/scala-2.12+/anorm/Parameter212Spec.scala @@ -4,22 +4,25 @@ import acolyte.jdbc.{ DefinedParameter => DParam, ParameterMetaData => ParamMeta import acolyte.jdbc.AcolyteDSL.{ connection, handleStatement } import acolyte.jdbc.Implicits._ -class `ParameterSpec 2.12` extends org.specs2.mutable.Specification { - - "Parameter (2.12)" title +final class Parameter212Spec extends org.specs2.mutable.Specification { + "Parameter (2.12+)".title val SqlStr = ParamMeta.Str "ToStatement" should { - implicit def con = 
connection(handleStatement.withUpdateHandler { - case UpdateExecution("EXEC proc ?", DParam("value:2", SqlStr) :: Nil) => 1 - case _ => 0 - }) + implicit def con: java.sql.Connection = + connection(handleStatement.withUpdateHandler { + case UpdateExecution("EXEC proc ?", DParam("value:2", SqlStr) :: Nil) => + 1 + + case _ => + 0 + }) "be contramap'ed" in { implicit val to: ToStatement[Int] = ToStatement.of[String].contramap[Int] { i => s"value:$i" } - SQL"""EXEC proc ${2}""".executeUpdate() must_== 1 + SQL"EXEC proc ${2}".executeUpdate() must_=== 1 } } } diff --git a/core/src/test/scala-2/TestUtils.scala b/core/src/test/scala-2/TestUtils.scala new file mode 100644 index 00000000..d44ffdcb --- /dev/null +++ b/core/src/test/scala-2/TestUtils.scala @@ -0,0 +1,9 @@ +package anorm + +import org.specs2.execute.{ Typecheck, Typechecked } + +object TestUtils { + import scala.language.experimental.macros + + def typecheck(code: String): Typechecked = macro Typecheck.typecheckImpl +} diff --git a/core/src/test/scala-2/anorm/AnormCompatSpec.scala b/core/src/test/scala-2/anorm/AnormCompatSpec.scala new file mode 100644 index 00000000..50be903a --- /dev/null +++ b/core/src/test/scala-2/anorm/AnormCompatSpec.scala @@ -0,0 +1,16 @@ +package anorm + +import acolyte.jdbc.AcolyteDSL.withQueryResult +import acolyte.jdbc.Implicits._ +import acolyte.jdbc.RowLists + +private[anorm] trait AnormCompatSpec { spec: AnormSpec => + "Query (scala2)" should { + "be executed as simple SQL" in withQueryResult(RowLists.booleanList :+ true) { implicit con => + val sql = SQL("SELECT 1") + + (implicitly[Sql](sql).aka("converted") must beAnInstanceOf[SimpleSql[_]]) + .and(SQL("SELECT 1").execute().aka("executed") must beTrue) + } + } +} diff --git a/core/src/test/scala-3/TestUtils.scala b/core/src/test/scala-3/TestUtils.scala new file mode 100644 index 00000000..924ddffd --- /dev/null +++ b/core/src/test/scala-3/TestUtils.scala @@ -0,0 +1,18 @@ +package anorm + +import scala.compiletime.testing.{ typeCheckErrors, Error } + +import org.specs2.execute.{ TypecheckError, TypecheckSuccess, Typechecked } + +object TestUtils: + + inline def typecheck(inline code: String): Typechecked = + typeCheckErrors(code).headOption match { + case Some(Error(msg, _, _, _)) => + Typechecked(code, TypecheckError(msg)) + + case _ => + Typechecked(code, TypecheckSuccess) + } + +end TestUtils diff --git a/core/src/test/scala-3/anorm/AnormCompatSpec.scala b/core/src/test/scala-3/anorm/AnormCompatSpec.scala new file mode 100644 index 00000000..23b297f3 --- /dev/null +++ b/core/src/test/scala-3/anorm/AnormCompatSpec.scala @@ -0,0 +1,3 @@ +package anorm + +private[anorm] trait AnormCompatSpec { _self: AnormSpec => } diff --git a/core/src/test/scala/anorm/AnormSpec.scala b/core/src/test/scala/anorm/AnormSpec.scala index d3324daf..f30be3a9 100644 --- a/core/src/test/scala/anorm/AnormSpec.scala +++ b/core/src/test/scala/anorm/AnormSpec.scala @@ -1,5 +1,7 @@ package anorm +import java.sql.Connection + import acolyte.jdbc.{ ExecutedParameter, QueryResult, RowLists, UpdateExecution } import acolyte.jdbc.AcolyteDSL.{ connection, handleQuery, handleStatement, updateResult, withQueryResult } import acolyte.jdbc.Implicits._ @@ -10,12 +12,12 @@ import RowLists.{ stringList, longList, rowList1, rowList2, rowList3 } import SqlParser.scalar final class AnormSpec extends Specification with H2Database with AnormTest { - "Anorm" title + "Anorm".title lazy val fooBarTable = rowList3(classOf[Long] -> "id", classOf[String] -> "foo", classOf[Int] -> "bar") "Row parser" 
should { - "return newly inserted data" in withH2Database { implicit c => + "return newly inserted data" in withH2Database { implicit c: Connection => createTest1Table() val ex: Boolean = SQL"""insert into test1(id, foo, bar) @@ -30,32 +32,33 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } } - "return defined option of case class" in withQueryResult(fooBarTable :+ (11L, "World", 21)) { implicit c => + "return defined option of case class" in withQueryResult(fooBarTable.append(11L, "World", 21)) { + implicit c: Connection => - SQL("SELECT * FROM test WHERE id = {id}") - .on("id" -> 11L) - .as(fooBarParser1.singleOpt) - .aka("result data") must beSome(TestTable(11L, "World", 21)) + SQL("SELECT * FROM test WHERE id = {id}") + .on("id" -> 11L) + .as(fooBarParser1.singleOpt) + .aka("result data") must beSome(TestTable(11L, "World", 21)) } "handle scalar result" >> { - "return single value" in withQueryResult(20) { implicit c => + "return single value" in withQueryResult(20) { implicit c: Connection => (SQL("SELECT * FROM test").as(scalar[Int].single).aka("single value #1") must_=== 20) .and(SQL("SELECT * FROM test").as(scalar[Int].single) must_=== 20) } - "return None for missing optional value" in withQueryResult(null.asInstanceOf[String]) { implicit c => + "return None for missing optional value" in withQueryResult(null.asInstanceOf[String]) { implicit c: Connection => SQL"SELECT * FROM test".withFetchSize(Some(1)).as(scalar[String].singleOpt) must beNone } - "return 0 for missing optional numeric" in withQueryResult(null.asInstanceOf[Double]) { implicit c => - SQL("SELECT * FROM test").as(scalar[Double].singleOpt).aka("single value") must beSome(0d) + "return 0 for missing optional numeric" in withQueryResult(null.asInstanceOf[Double]) { implicit c: Connection => + SQL("SELECT * FROM test").as(scalar[Double].singleOpt).aka("single value") must beSome(0D) } - "throw exception when single result is missing" in withQueryResult(fooBarTable) { implicit c => + "throw exception when single result is missing" in withQueryResult(fooBarTable) { implicit c: Connection => SQL("SELECT * FROM test").as(fooBarParser1.single).aka("mapping") must throwA[Exception].like { case e: Exception => @@ -65,72 +68,80 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "raise error when there is more than 1 required or optional row" in { - withQueryResult(stringList :+ "A" :+ "B") { implicit c => + withQueryResult(stringList :+ "A" :+ "B") { implicit c: Connection => lazy val sql = SQL("SELECT 1") (sql .as(scalar[String].single) - .aka("single parser") must throwA[Exception].like { case e: Exception => - e.getMessage.aka("error") must_=== - "SqlMappingError(too many rows when expecting a single one)" + .aka("single parser") must throwA[Exception].like { + case e: Exception => + e.getMessage.aka("error") must_=== + "SqlMappingError(too many rows when expecting a single one)" }).and( sql .as(scalar[String].singleOpt) - .aka("singleOpt parser") must throwA[Exception].like { case e: Exception => - e.getMessage.aka("error") must_=== - "SqlMappingError(too many rows when expecting a single one)" + .aka("singleOpt parser") must throwA[Exception].like { + case e: Exception => + e.getMessage.aka("error") must_=== + "SqlMappingError(too many rows when expecting a single one)" } ) } } - "return single string from executed query" in withQueryResult("Result for test-proc-1") { implicit c => + "return single string from executed query" in withQueryResult("Result for 
test-proc-1") { + implicit c: Connection => - SQL("EXEC stored_proc({param})") - .on("param" -> "test-proc-1") - .executeQuery() - .as(scalar[String].single) - .aka("single string") must_=== "Result for test-proc-1" + SQL("EXEC stored_proc({param})") + .on("param" -> "test-proc-1") + .executeQuery() + .as(scalar[String].single) + .aka("single string") must_=== "Result for test-proc-1" } } "handle optional property in case class" >> { "return instance with defined option" in withQueryResult( - rowList2(classOf[Int] -> "id", classOf[String] -> "val") :+ (2, "str") - ) { implicit c => + rowList2(classOf[Int] -> "id", classOf[String] -> "val").append(2, "str") + ) { implicit c: Connection => SQL("SELECT * FROM test") - .as((SqlParser.int("id") ~ SqlParser.str("val").?).map { case id ~ v => - id -> v - } single) + .as((SqlParser.int("id") ~ SqlParser.str("val").?).map { + case id ~ v => + id -> v + }.single) .aka("mapped data") must_=== (2 -> Some("str")) } "return instance with None for column not found" in withQueryResult(rowList1(classOf[Long] -> "id") :+ 123L) { - implicit c => + implicit c: Connection => SQL("SELECT * FROM test") - .as((SqlParser.long("id") ~ SqlParser.str("val").?).map { case id ~ v => - id -> v - } single) + .as((SqlParser.long("id") ~ SqlParser.str("val").?).map { + case id ~ v => + id -> v + }.single) .aka("mapped data") must_=== (123L -> None) } - "throw exception when type doesn't match" in withQueryResult(fooBarTable :+ (1L, "str", 3)) { implicit c => + "throw exception when type doesn't match" in withQueryResult(fooBarTable.append(1L, "str", 3)) { + implicit c: Connection => - SQL("SELECT * FROM test") - .as((SqlParser.long("id") ~ SqlParser.int("foo").?).map { case id ~ v => - id -> v - } single) - .aka("parser") must throwA[Exception].like { case e: Exception => - e.getMessage.aka("error") must startWith("TypeDoesNotMatch(Cannot convert str:") - } + SQL("SELECT * FROM test") + .as((SqlParser.long("id") ~ SqlParser.int("foo").?).map { + case id ~ v => + id -> v + }.single) + .aka("parser") must throwA[Exception].like { + case e: Exception => + e.getMessage.aka("error") must startWith("TypeDoesNotMatch(Cannot convert str:") + } } } - "throw exception when type doesn't match" in withQueryResult("str") { implicit c => + "throw exception when type doesn't match" in withQueryResult("str") { implicit c: Connection => SQL("SELECT * FROM test").as(scalar[Int].single).aka("mismatching type") must throwA[Exception]( "TypeDoesNotMatch" ) @@ -138,17 +149,18 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } lazy val rows1 = rowList1(classOf[String] -> "val") :+ "str" - lazy val rows2 = rowList2(classOf[Int] -> "id", classOf[String] -> "val") :+ (2, "str") + + lazy val rows2 = rowList2(classOf[Int] -> "id", classOf[String] -> "val").append(2, "str") "check column is found on left" >> { - "successfully with mandatory value" in withQueryResult(rows2) { implicit c => + "successfully with mandatory value" in withQueryResult(rows2) { implicit c: Connection => SQL("SELECT * FROM test") .as((SqlParser.int("id") ~> SqlParser.str("val")).single) .aka("mapped data") must_=== "str" } - "successfully with optional value" in withQueryResult(rows1) { implicit c => + "successfully with optional value" in withQueryResult(rows1) { implicit c: Connection => SQL("SELECT * FROM test") .as((SqlParser.int("id").? 
~> SqlParser.str("val")).single) .aka("mapped data") must_=== "str" @@ -157,12 +169,12 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "check column is found on left" >> { - "successfully with mandatory value" in withQueryResult(rows2) { implicit c => + "successfully with mandatory value" in withQueryResult(rows2) { implicit c: Connection => SQL("SELECT * FROM test").as((SqlParser.int("id") <~ SqlParser.str("val")).single).aka("mapped data") must_=== 2 } - "successfully with optional value" in withQueryResult(rows1) { implicit c => + "successfully with optional value" in withQueryResult(rows1) { implicit c: Connection => SQL("SELECT * FROM test") .as((SqlParser.str("val") <~ SqlParser.int("id").?).single) .aka("mapped data") must_=== "str" @@ -170,19 +182,22 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } } - "fold row" in withQueryResult(rows2) { implicit c => + "fold row" in withQueryResult(rows2) { implicit c: Connection => SQL("SELECT * FROM test").as( SqlParser .folder(List.empty[(Any, String, String)]) { (ls, v, m) => Right((v, m.column.qualified, m.clazz) :: ls) } .singleOpt - ) must beSome(("str", ".val", "java.lang.String") :: (2, ".id", "int") :: Nil) + ) must beSome[List[Tuple3[Any, String, String]]].which { + _ must_=== (("str", ".val", "java.lang.String") :: + (2, ".id", "int") :: Nil) + } } } "Instance of case class" should { - "be parsed using convience parsers with column names" in withH2Database { implicit c => + "be parsed using convience parsers with column names" in withH2Database { implicit c: Connection => createTest1Table() val fixture = TestTable(11L, "World", 21) @@ -200,8 +215,8 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } } - "be parsed using raw 'get' parser with column names" in withQueryResult(fooBarTable :+ (11L, "World", 21)) { - implicit c => + "be parsed using raw 'get' parser with column names" in withQueryResult(fooBarTable.append(11L, "World", 21)) { + implicit c: Connection => SQL("select * from test1 where id = {id}") .on(Symbol("id") -> 11L) .as(fooBarParser2.singleOpt) @@ -210,7 +225,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "be parsed using convience parsers with column positions" in { - withQueryResult(fooBarTable :+ (11L, "World", 21)) { implicit c => + withQueryResult(fooBarTable.append(11L, "World", 21)) { implicit c: Connection => SQL("insert into test1(id, foo, bar) values ({id}, {foo}, {bar})") .on(Symbol("id") -> 11L, Symbol("foo") -> "World", Symbol("bar") -> 21) .execute() @@ -224,7 +239,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "be parsed using raw 'get' parser with column positions" in { - withQueryResult(fooBarTable :+ (11L, "World", 21)) { implicit c => + withQueryResult(fooBarTable.append(11L, "World", 21)) { implicit c: Connection => SQL("insert into test1(id, foo, bar) values ({id}, {foo}, {bar})") .on(Symbol("id") -> 11L, Symbol("foo") -> "World", Symbol("bar") -> 21) .execute() @@ -240,7 +255,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { "Result mixing named and unnamed columns" should { "be parsable using named and positional parsers" in withQueryResult( - rowList3(classOf[String], classOf[String], classOf[String]).withLabel(2, "named") :+ ("a", "b", "c") + rowList3(classOf[String], classOf[String], classOf[String]).withLabel(2, "named").append("a", "b", "c") ) { implicit con => SQL("SELECT 
*").as(mixedParser1.single).aka("parsed mixed result") must_=== (("a", "b", "c")) @@ -249,19 +264,19 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "List" should { - "be Nil when there is no result" in withQueryResult(QueryResult.Nil) { implicit c => + "be Nil when there is no result" in withQueryResult(QueryResult.Nil) { implicit c: Connection => SQL("EXEC test").as(scalar[Int].*).aka("list") must_=== Nil } "raise error when non-empty one is required and there is no result" in { - withQueryResult(QueryResult.Nil) { implicit c => + withQueryResult(QueryResult.Nil) { implicit c: Connection => SQL("EXEC test").as(scalar[Int].+).aka("non-empty list") must throwA[Throwable]("Empty Result Set") } } "be parsed from mapped result" in withQueryResult( - rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100) :+ ("row2", 200) - ) { implicit c => + rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100).append("row2", 200) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as(RowParser { row => @@ -270,28 +285,30 @@ final class AnormSpec extends Specification with H2Database with AnormTest { .aka("tuple list") must_=== List("row1" -> 100, "row2" -> 200) } - "be parsed from class mapping" in withQueryResult(fooBarTable :+ (12L, "World", 101) :+ (14L, "Mondo", 3210)) { - implicit c => - val exp = - List(TestTable(12L, "World", 101), TestTable(14L, "Mondo", 3210)) - val q = SQL("SELECT * FROM test") + "be parsed from class mapping" in withQueryResult( + fooBarTable.append(12L, "World", 101).append(14L, "Mondo", 3210) + ) { implicit c: Connection => + val exp = + List(TestTable(12L, "World", 101), TestTable(14L, "Mondo", 3210)) + val q = SQL("SELECT * FROM test") - (q.as(fooBarParser1.*).aka("list") must_=== exp).and(q.as(fooBarParser1.+).aka("non-empty list") must_=== exp) + (q.as(fooBarParser1.*).aka("list") must_=== exp).and(q.as(fooBarParser1.+).aka("non-empty list") must_=== exp) } "be parsed from mapping with optional column" in withQueryResult( - rowList2(classOf[Int] -> "id", classOf[String] -> "val").append(9, null.asInstanceOf[String]) :+ (2, "str") - ) { implicit c => + rowList2(classOf[Int] -> "id", classOf[String] -> "val").append(9, null.asInstanceOf[String]).append(2, "str") + ) { implicit c: Connection => SQL("SELECT * FROM test") - .as((SqlParser.int("id") ~ SqlParser.str("val").?).map { case id ~ v => - id -> v - } *) + .as((SqlParser.int("id") ~ SqlParser.str("val").?).map { + case id ~ v => + id -> v + }.*) .aka("parsed list") must_=== List(9 -> None, 2 -> Some("str")) } - "include scalar values" in withQueryResult(stringList :+ "A" :+ "B" :+ "C" :+ "D") { implicit c => + "include scalar values" in withQueryResult(stringList :+ "A" :+ "B" :+ "C" :+ "D") { implicit c: Connection => val exp = List("A", "B", "C", "D") val q = SQL("SELECT c FROM letters") @@ -301,7 +318,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "Aggregation over all rows" should { - "be empty when there is no result" in withQueryResult(QueryResult.Nil) { implicit c => + "be empty when there is no result" in withQueryResult(QueryResult.Nil) { implicit c: Connection => SQL"EXEC test" .fold[Option[Int]](None, ColumnAliaser.empty) { (_, _) => Some(0) } .aka("aggregated value") must beRight(Option.empty[Int]) @@ -309,8 +326,8 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "be parsed from mapped result" in withQueryResult( - rowList2(classOf[String] -> "foo", 
classOf[Int] -> "bar").append("row1", 100) :+ ("row2", 200) - ) { implicit c => + rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100).append("row2", 200) + ) { implicit c: Connection => SQL"SELECT * FROM test" .fold(List.empty[(String, Int)], ColumnAliaser.empty) { (l, row) => @@ -320,7 +337,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } - "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => + "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c: Connection => var i = 0 (SQL"SELECT str" @@ -329,14 +346,15 @@ final class AnormSpec extends Specification with H2Database with AnormTest { else sys.error("Failure") } - .aka("aggregate on failure") must beLike { case Left(err :: Nil) => - err.getMessage.aka("failure") must_=== "Failure" + .aka("aggregate on failure") must beLike { + case Left(err :: Nil) => + err.getMessage.aka("failure") must_=== "Failure" }).and(i.aka("row count") must_=== 1) } } "Aggregation over variable number of rows" should { - "be empty when there is no result" in withQueryResult(QueryResult.Nil) { implicit c => + "be empty when there is no result" in withQueryResult(QueryResult.Nil) { implicit c: Connection => SQL"EXEC test" .foldWhile(Option.empty[Int], ColumnAliaser.empty) { (_, _) => Some(0) -> true } .aka("aggregated value") must beRight(Option.empty[Int]) @@ -344,8 +362,8 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "be parsed from mapped result" in withQueryResult( - rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100) :+ ("row2", 200) - ) { implicit c => + rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100).append("row2", 200) + ) { implicit c: Connection => SQL"SELECT * FROM test" .foldWhile(List.empty[(String, Int)], ColumnAliaser.empty) { (l, row) => @@ -354,7 +372,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { .aka("tuple stream") must_=== Right(List("row1" -> 100, "row2" -> 200)) } - "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => + "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c: Connection => var i = 0 (SQL"SELECT str" @@ -363,21 +381,23 @@ final class AnormSpec extends Specification with H2Database with AnormTest { else sys.error("Failure") } - .aka("aggregate on failure") must beLike { case Left(err :: Nil) => - err.getMessage.aka("failure") must_=== "Failure" + .aka("aggregate on failure") must beLike { + case Left(err :: Nil) => + err.getMessage.aka("failure") must_=== "Failure" }).and(i.aka("row count") must_=== 1) } - "stop after first row" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => - var i = 0 + "stop after first row" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { + implicit c: Connection => + var i = 0 - SQL"SELECT str" - .foldWhile(Set.empty[String], ColumnAliaser.empty) { (l, row) => - if (i == 0) { i = i + 1; (l + row[String]("foo")) -> true } - else (l, false) + SQL"SELECT str" + .foldWhile(Set.empty[String], ColumnAliaser.empty) { (l, row) => + if (i == 0) { i = i + 1; (l + row[String]("foo")) -> true } + else (l, false) - } - .aka("partial aggregate") must_=== Right(Set("A")) + } + .aka("partial aggregate") must_=== Right(Set("A")) } } @@ -391,21 +411,21 @@ final class AnormSpec extends Specification 
with H2Database with AnormTest { @inline def withQRes[T](r: => QueryResult)(f: java.sql.Connection => T): T = f(connection(handleQuery(_ => r), "acolyte.resultSet.initOnFirstRow" -> "true")) - "do nothing when there is no result" in withQueryResult(QueryResult.Nil) { implicit c => + "do nothing when there is no result" in withQueryResult(QueryResult.Nil) { implicit c: Connection => SQL"EXEC test".withResult(go(_)).aka("iteration") must beRight.which { _.aka("result list") must beEmpty } } "do nothing when there is no result (with degraded result set)" in { - withQRes(QueryResult.Nil) { implicit c => + withQRes(QueryResult.Nil) { implicit c: Connection => SQL"EXEC test".withResultSetOnFirstRow(true).withResult(go(_)).aka("iteration") must beRight[List[Row]].like { case Row() :: Nil => ok } } } - "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => + "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c: Connection => var first = false (SQL"SELECT str" .withResult { @@ -413,13 +433,14 @@ final class AnormSpec extends Specification with H2Database with AnormTest { first = true; sys.error("Failure") case _ => sys.error("Unexpected") } - .aka("processing with failure") must beLeft.like { case err :: Nil => - err.getMessage.aka("failure") must_=== "Failure" + .aka("processing with failure") must beLeft.like { + case err :: Nil => + err.getMessage.aka("failure") must_=== "Failure" }).and(first.aka("first read") must beTrue) } "handle failure (with degraded result set)" in withQRes(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { - implicit c => + implicit c: Connection => var first = false (SQL"SELECT str" .withResultSetOnFirstRow(true) @@ -428,13 +449,14 @@ final class AnormSpec extends Specification with H2Database with AnormTest { first = true; sys.error("Failure") case _ => sys.error("Unexpected") } - .aka("processing with failure") must beLeft.like { case err :: Nil => - err.getMessage.aka("failure") must_=== "Failure" + .aka("processing with failure") must beLeft.like { + case err :: Nil => + err.getMessage.aka("failure") must_=== "Failure" }).and(first.aka("first read") must beTrue) } "stop after first row without failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { - implicit c => + implicit c: Connection => SQL"SELECT str" .withResult { case Some(first) => Set(first.row[String]("foo")) @@ -444,7 +466,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "stop after first row without failure (with degraded result set)" in { - withQRes(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => + withQRes(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c: Connection => SQL"SELECT str" .withResultSetOnFirstRow(true) .withResult { @@ -492,7 +514,7 @@ final class AnormSpec extends Specification with H2Database with AnormTest { SQL"INSERT ${3}".executeInsert(scalar[String].singleOpt).aka("insertion") must beSome("generated") } - "as long with column selection" in withH2Database { implicit c => + "as long with column selection" in withH2Database { implicit c: Connection => val tableName = s"foo${System.identityHashCode(c)}" createTable(tableName, "id bigint auto_increment", "name varchar") @@ -512,13 +534,6 @@ final class AnormSpec extends Specification with H2Database with AnormTest { } "Query" should { - "be executed as simple SQL" in withQueryResult(RowLists.booleanList :+ true) { implicit con => - val sql = SQL("SELECT 
1") - - (implicitly[Sql](sql).aka("converted") must beAnInstanceOf[SimpleSql[_]]) - .and(SQL("SELECT 1").execute().aka("executed") must beTrue) - } - "be properly shown as string representation" >> { "using parser" in { Show @@ -576,24 +591,29 @@ final class AnormSpec extends Specification with H2Database with AnormTest { sealed trait AnormTest { db: H2Database => import SqlParser.{ get, int, long, str } - val fooBarParser1 = (long("id") ~ str("foo") ~ int("bar")).map { case id ~ foo ~ bar => - TestTable(id, foo, bar) + val fooBarParser1 = (long("id") ~ str("foo") ~ int("bar")).map { + case id ~ foo ~ bar => + TestTable(id, foo, bar) } val fooBarParser2 = - (get[Long]("id") ~ get[String]("foo") ~ get[Int]("bar")).map { case id ~ foo ~ bar => - TestTable(id, foo, bar) + (get[Long]("id") ~ get[String]("foo") ~ get[Int]("bar")).map { + case id ~ foo ~ bar => + TestTable(id, foo, bar) } - val fooBarParser3 = (long(1) ~ str(2) ~ int(3)).map { case id ~ foo ~ bar => - TestTable(id, foo, bar) + val fooBarParser3 = (long(1) ~ str(2) ~ int(3)).map { + case id ~ foo ~ bar => + TestTable(id, foo, bar) } - val fooBarParser4 = (get[Long](1) ~ get[String](2) ~ get[Int](3)).map { case id ~ foo ~ bar => - TestTable(id, foo, bar) + val fooBarParser4 = (get[Long](1) ~ get[String](2) ~ get[Int](3)).map { + case id ~ foo ~ bar => + TestTable(id, foo, bar) } - val mixedParser1 = (str(1) ~ str("named") ~ str(3)).map { case i ~ j ~ k => - (i, j, k) + val mixedParser1 = (str(1) ~ str("named") ~ str(3)).map { + case i ~ j ~ k => + (i, j, k) } } diff --git a/core/src/test/scala/anorm/BatchSqlSpec.scala b/core/src/test/scala/anorm/BatchSqlSpec.scala index 0b89f053..8b2965c2 100644 --- a/core/src/test/scala/anorm/BatchSqlSpec.scala +++ b/core/src/test/scala/anorm/BatchSqlSpec.scala @@ -1,7 +1,8 @@ package anorm -class BatchSqlSpec extends org.specs2.mutable.Specification with H2Database { - "Batch SQL" title +final class BatchSqlSpec extends org.specs2.mutable.Specification with H2Database { + + "Batch SQL".title "Creation" should { "fail with parameter maps not having same names" in { @@ -71,9 +72,12 @@ class BatchSqlSpec extends org.specs2.mutable.Specification with H2Database { "be successful with first parameter map" in { val b1 = BatchSql("SELECT * FROM tbl WHERE a = {a}, b = {b}", Seq[NamedParameter]("a" -> 0, "b" -> 1), Nil) - implicit val toParams = ToParameterList[(Int, Int)] { case (a, b) => - List[NamedParameter](NamedParameter.namedWithString("a" -> a), NamedParameter.namedWithString("b" -> b)) - } + implicit val toParams: ToParameterList[(Int, Int)] = + ToParameterList[(Int, Int)] { + case (a, b) => + List[NamedParameter](NamedParameter.namedWithString("a" -> a), NamedParameter.namedWithString("b" -> b)) + } + lazy val b2 = b1.bind(2 -> 3) lazy val expectedMaps = Seq( diff --git a/core/src/test/scala/anorm/CursorSpec.scala b/core/src/test/scala/anorm/CursorSpec.scala index 2d68d3bc..2c60d206 100644 --- a/core/src/test/scala/anorm/CursorSpec.scala +++ b/core/src/test/scala/anorm/CursorSpec.scala @@ -3,8 +3,8 @@ package anorm import acolyte.jdbc.Implicits._ import acolyte.jdbc.RowLists.rowList1 -class CursorSpec extends org.specs2.mutable.Specification { - "Cursor" title +final class CursorSpec extends org.specs2.mutable.Specification { + "Cursor".title "Cursor" should { "not be returned when there is no result" in { @@ -12,14 +12,14 @@ class CursorSpec extends org.specs2.mutable.Specification { } "be returned for one row" in { - Cursor(stringList :+ "A" resultSet, ColumnAliaser.empty).aka("cursor") must 
beSome.which { cur => + Cursor((stringList :+ "A").resultSet, ColumnAliaser.empty).aka("cursor") must beSome.which { cur => (cur.row[String]("str").aka("row") must_=== "A").and(cur.next.aka("after first") must beNone) } } "be return for three rows" in { - Cursor(stringList :+ "red" :+ "green" :+ "blue" resultSet, ColumnAliaser.empty).aka("cursor") must beSome.which { - first => + Cursor((stringList :+ "red" :+ "green" :+ "blue").resultSet, ColumnAliaser.empty).aka("cursor") must beSome + .which { first => (first .row[String]("str") .aka("row #1") must_=== "red").and(first.next.aka("after first") must beSome.which { snd => @@ -28,14 +28,15 @@ class CursorSpec extends org.specs2.mutable.Specification { (third.row[String]("str").aka("row #1") must_=== "blue").and(third.next.aka("after third") must beNone) }) }) - } + } } "match pattern" in { - Cursor(stringList :+ "Foo" :+ "Bar" resultSet, ColumnAliaser.empty).aka("cursor") must beSome[Cursor].like { + Cursor((stringList :+ "Foo" :+ "Bar").resultSet, ColumnAliaser.empty).aka("cursor") must beSome[Cursor].like { case Cursor(row1, b) => - (row1[String](1) must_=== "Foo").and(b must beSome[Cursor].like { case Cursor(row2, None) => - row2[String](1) must_=== "Bar" + (row1[String](1) must_=== "Foo").and(b must beSome[Cursor].like { + case Cursor(row2, None) => + row2[String](1) must_=== "Bar" }) } } diff --git a/core/src/test/scala/anorm/FunctionAdapterSpec.scala b/core/src/test/scala/anorm/FunctionAdapterSpec.scala index 221deeaf..d6e05802 100644 --- a/core/src/test/scala/anorm/FunctionAdapterSpec.scala +++ b/core/src/test/scala/anorm/FunctionAdapterSpec.scala @@ -1,5 +1,7 @@ package anorm +import java.sql.Connection + import acolyte.jdbc.AcolyteDSL.withQueryResult import acolyte.jdbc.Implicits._ import acolyte.jdbc.RowLists._ @@ -9,11 +11,11 @@ import com.github.ghik.silencer.silent import SqlParser.{ bool, str, int, long, get } final class FunctionAdapterSpec extends org.specs2.mutable.Specification { - "Function flattener" title + "Function flattener".title "Single column" should { - "be applied with parser function" in withQueryResult(intList :+ 123) { implicit c => - SQL("SELECT * FROM test").as(int(1).map(SqlParser.to(_.toString)) single) must_=== "123" + "be applied with parser function" in withQueryResult(intList :+ 123) { implicit c: Connection => + SQL("SELECT * FROM test").as(int(1).map(SqlParser.to(_.toString)).single) must_=== "123" } } @@ -23,9 +25,9 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { val schema = rowList2(classOf[String] -> "A", classOf[Int] -> "B") - withQueryResult(schema :+ ("A", 2)) { implicit c => + withQueryResult(schema.append("A", 2)) { implicit c: Connection => SQL("SELECT * FROM test") - .as((str("A") ~ int("B")).map(SqlParser.to(foo _)) single) + .as((str("A") ~ int("B")).map(SqlParser.to(foo _)).single) .aka("function result") must_=== "Fn2" } @@ -35,9 +37,9 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { case class Foo(a: String, b: Int, c: Long) val schema = rowList3(classOf[String] -> "A", classOf[Int] -> "B", classOf[Long] -> "C") - withQueryResult(schema :+ ("A", 2, 3L)) { implicit c => + withQueryResult(schema.append("A", 2, 3L)) { implicit c: Connection => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C")).map(SqlParser.to(Foo.apply _)) single) + .as((str("A") ~ int("B") ~ long("C")).map(SqlParser.to(Foo.apply _)).single) .aka("function result") must_=== Foo("A", 2, 3L) } @@ -48,9 +50,9 @@ final class FunctionAdapterSpec 
extends org.specs2.mutable.Specification { val schema = rowList4(classOf[String] -> "A", classOf[Int] -> "B", classOf[Long] -> "C", classOf[Double] -> "D") - withQueryResult(schema :+ ("A", 2, 3L, 4.56d)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D)) { implicit c: Connection => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D")).map(SqlParser.to(foo _)) single) + .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D")).map(SqlParser.to(foo _)).single) .aka("function result") must_=== "Fn4" } @@ -67,9 +69,9 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Short] -> "E" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort)) { implicit c: Connection => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E")).map(SqlParser.to(foo _)) single) + .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E")).map(SqlParser.to(foo _)).single) .aka("function result") must_=== "Fn5" } @@ -87,11 +89,12 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Byte] -> "F" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte)) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn6" @@ -111,11 +114,12 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Boolean] -> "G" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true)) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn7" @@ -136,12 +140,12 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[String] -> "H" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B")) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B")) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - )).map(SqlParser.to(foo _)) single + )).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn8" @@ -163,12 +167,12 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Int] -> "I" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3)) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I")).map(SqlParser.to(foo _)) single + ) ~ int("I")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn9" @@ -202,14 +206,15 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Long] -> "J" ) 
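The rewrites in the hunks below repeat the same two patterns applied throughout this spec: Acolyte's `:+` with a tuple literal relied on argument auto-tupling, which Scala 3 no longer performs, so rows are now built with the explicit varargs `append`; and the result block annotates its implicit parameter as `Connection` instead of leaving the type to inference. A minimal sketch of the rewritten shape, reusing the two-column fixture from the Fn2 case above (the query and parser are illustrative, not part of this patch):

    import java.sql.Connection
    import acolyte.jdbc.AcolyteDSL.withQueryResult
    import acolyte.jdbc.Implicits._
    import acolyte.jdbc.RowLists.rowList2
    import anorm._

    val schema = rowList2(classOf[String] -> "A", classOf[Int] -> "B")

    // before: `schema :+ ("A", 2)` auto-tupled the row and `implicit c =>` was untyped;
    // after: explicit varargs append and an annotated implicit parameter
    withQueryResult(schema.append("A", 2)) { implicit c: Connection =>
      SQL("SELECT * FROM test").as(SqlParser.str("A").single)
    }
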
- withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L)) { implicit c => - SQL("SELECT * FROM test") - .as( - (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( - "H" - ) ~ int("I") ~ long("J")).map(SqlParser.to(foo _)) single - ) - .aka("function result") must_=== "Fn10" + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L)) { + implicit c: Connection => + SQL("SELECT * FROM test") + .as( + (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( + "H" + ) ~ int("I") ~ long("J")).map(SqlParser.to(foo _)).single + ) + .aka("function result") must_=== "Fn10" } } @@ -243,14 +248,15 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Double] -> "K" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d)) { implicit c => - SQL("SELECT * FROM test") - .as( - (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( - "H" - ) ~ int("I") ~ long("J") ~ get[Double]("K")).map(SqlParser.to(foo _)) single - ) - .aka("function result") must_=== "Fn11" + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D)) { + implicit c: Connection => + SQL("SELECT * FROM test") + .as( + (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( + "H" + ) ~ int("I") ~ long("J") ~ get[Double]("K")).map(SqlParser.to(foo _)).single + ) + .aka("function result") must_=== "Fn11" } } @@ -286,13 +292,13 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { classOf[Short] -> "L" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort)) { - implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort)) { + implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L")).map(SqlParser.to(foo _)) single + ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn12" @@ -333,14 +339,15 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte) - ) { implicit c => + schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn13" @@ -383,14 +390,15 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false) - ) { implicit c => + schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte, false) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( 
(str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn14" @@ -435,14 +443,16 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C") - ) { implicit c => + schema + .append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte, false, "C") + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn15" @@ -489,15 +499,32 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3 + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - )).map(SqlParser.to(foo _)) single + )).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn16" @@ -546,15 +573,33 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q")).map(SqlParser.to(foo _)) single + ) ~ long("Q")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn17" @@ -605,15 +650,34 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R")).map(SqlParser.to(foo _)) single + ) ~ long("Q") ~ 
get[Double]("R")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn18" @@ -666,15 +730,35 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S")).map(SqlParser.to(foo _)) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn19" @@ -729,15 +813,36 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D") - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + "D" + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T")).map(SqlParser.to(foo _)) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn20" @@ -794,15 +899,37 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D", 4) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + "D", + 4 + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ int("U")).map(SqlParser.to(foo _)) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ int("U")).map(SqlParser.to(foo _)).single ) .aka("function result") must_=== "Fn21" @@ -861,8 +988,31 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D", 4, 5L) - ) { implicit c => + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + 
"D", + 4, + 5L + ) + ) { implicit c: Connection => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( @@ -870,7 +1020,8 @@ final class FunctionAdapterSpec extends org.specs2.mutable.Specification { ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ int("U") ~ long("V")) - .map(SqlParser.to(foo _)) single + .map(SqlParser.to(foo _)) + .single ) .aka("function result") must_=== "Fn22" diff --git a/core/src/test/scala/anorm/H2Database.scala b/core/src/test/scala/anorm/H2Database.scala index 1ffe3786..f484903f 100644 --- a/core/src/test/scala/anorm/H2Database.scala +++ b/core/src/test/scala/anorm/H2Database.scala @@ -19,7 +19,9 @@ trait H2Database { case class TestTable(id: Long, foo: String, bar: Int) implicit val testToParams: ToParameterList[TestTable] = - Macro.toParameters[TestTable] + ToParameterList[TestTable] { table => + List[NamedParameter]("id" -> table.id, "foo" -> table.foo, "bar" -> table.bar) + } /** Create a simple 'test1' table for testing with. */ def createTest1Table()(implicit conn: Connection): Unit = createTable("test1", "id bigint", "foo varchar", "bar int") diff --git a/core/src/test/scala/anorm/JavaTimeSpec.scala b/core/src/test/scala/anorm/JavaTimeSpec.scala index 7208a630..b462d62f 100644 --- a/core/src/test/scala/anorm/JavaTimeSpec.scala +++ b/core/src/test/scala/anorm/JavaTimeSpec.scala @@ -1,6 +1,6 @@ package anorm -import java.sql.Timestamp +import java.sql.{ Connection, Timestamp } import java.time.{ Instant, LocalDate, LocalDateTime, ZoneId, ZonedDateTime } import acolyte.jdbc.AcolyteDSL._ @@ -9,7 +9,9 @@ import acolyte.jdbc.RowLists._ import org.specs2.mutable.Specification -class JavaTimeColumnSpec extends Specification { +final class JavaTimeColumnSpec extends Specification { + "Java time column".title + import SqlParser.scalar "Column mapped as Java8+ instant" should { @@ -20,38 +22,40 @@ class JavaTimeColumnSpec extends Specification { val time = instant.toEpochMilli - "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit con => - SQL("SELECT d").as(scalar[Instant].single).aka("parsed instant") must_=== instant + "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit c: Connection => + SQL("SELECT d").as(scalar[Instant].single)(c).aka("parsed instant") must_=== instant } - "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit con => + "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[Instant].single).aka("parsed instant") must_=== instant } - "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { implicit con => - SQL("SELECT ts").as(scalar[Instant].single).aka("parsed instant") must_=== instant + "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[Instant].single).aka("parsed instant") must_=== instant } "be parsed from timestamp with nano precision" in { val instantWithNanoPrecision = Instant.parse("2021-01-06T08:45:26.441477Z") - withQueryResult(timestampList :+ Timestamp.from(instantWithNanoPrecision)) { implicit con => + withQueryResult(timestampList :+ Timestamp.from(instantWithNanoPrecision)) { implicit c: Connection => 
SQL("SELECT ts").as(scalar[Instant].single).aka("parsed instant") must_=== instantWithNanoPrecision } } - "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit con => + "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit c: Connection => SQL("SELECT time").as(scalar[Instant].single).aka("parsed instant") must_=== instant } "be parsed from timestamp wrapper" >> { - "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit con => + "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[Instant].single).aka("parsed instant") must_=== instant } - "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { implicit con => - SQL("SELECT ts").as(scalar[Instant].singleOpt).aka("parsed instant") must beNone + "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[Instant].singleOpt).aka("parsed instant") must beNone } } } @@ -65,30 +69,32 @@ class JavaTimeColumnSpec extends Specification { val date = LocalDateTime.ofInstant(instant, ZoneId.systemDefault) val time = instant.toEpochMilli - "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit con => + "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit c: Connection => SQL("SELECT d").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date } - "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit con => + "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date } - "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { implicit con => - SQL("SELECT ts").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date + "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date } - "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit con => + "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit c: Connection => SQL("SELECT time").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date } "be parsed from timestamp wrapper" >> { - "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit con => + "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[LocalDateTime].single).aka("parsed local date/time") must_=== date } - "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { implicit con => - SQL("SELECT ts").as(scalar[LocalDateTime].singleOpt).aka("parsed local date/time") must beNone + "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[LocalDateTime].singleOpt).aka("parsed local date/time") must beNone } } } @@ -98,30 +104,32 @@ class JavaTimeColumnSpec extends Specification { val date = LocalDate.now() val time = instant.toEpochMilli - "be 
parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit con => + "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit c: Connection => SQL("SELECT d").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date } - "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit con => + "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date } - "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { implicit con => - SQL("SELECT ts").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date + "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date } - "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit con => + "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit c: Connection => SQL("SELECT time").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date } "be parsed from timestamp wrapper" >> { - "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit con => + "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[LocalDate].single).aka("parsed local date/time") must_=== date } - "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { implicit con => - SQL("SELECT ts").as(scalar[LocalDate].singleOpt).aka("parsed local date/time") must beNone + "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[LocalDate].singleOpt).aka("parsed local date/time") must beNone } } } @@ -135,38 +143,40 @@ class JavaTimeColumnSpec extends Specification { val date = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault) val time = instant.toEpochMilli - "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit con => + "be parsed from date" in withQueryResult(dateList :+ new java.sql.Date(time)) { implicit c: Connection => SQL("SELECT d").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date } - "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit con => + "be parsed from time" in withQueryResult(timeList :+ new java.sql.Time(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date } - "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { implicit con => - SQL("SELECT ts").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date + "be parsed from timestamp" in withQueryResult(timestampList :+ new java.sql.Timestamp(time)) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date } "be parsed from timestamp with nano precision" in { val instantWithNanoPrecision = Instant.parse("2021-01-06T08:45:26.441477Z") - withQueryResult(timestampList :+ java.sql.Timestamp.from(instantWithNanoPrecision)) { implicit con => + withQueryResult(timestampList :+ 
java.sql.Timestamp.from(instantWithNanoPrecision)) { implicit c: Connection => SQL("SELECT ts").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== ZonedDateTime .ofInstant(instantWithNanoPrecision, ZoneId.systemDefault) } } - "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit con => + "be parsed from numeric time" in withQueryResult(longList :+ time) { implicit c: Connection => SQL("SELECT time").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date } "be parsed from timestamp wrapper" >> { - "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit con => + "with not null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ tsw1(time)) { implicit c: Connection => SQL("SELECT ts").as(scalar[ZonedDateTime].single).aka("parsed zoned date/time") must_=== date } - "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { implicit con => - SQL("SELECT ts").as(scalar[ZonedDateTime].singleOpt).aka("parsed zoned date/time") must beNone + "with null value" in withQueryResult(rowList1(classOf[TWrapper]) :+ null.asInstanceOf[TWrapper]) { + implicit c: Connection => + SQL("SELECT ts").as(scalar[ZonedDateTime].singleOpt).aka("parsed zoned date/time") must beNone } } } diff --git a/core/src/test/scala/anorm/MacroSpec.scala b/core/src/test/scala/anorm/MacroSpec.scala index 4fef3319..694feecd 100644 --- a/core/src/test/scala/anorm/MacroSpec.scala +++ b/core/src/test/scala/anorm/MacroSpec.scala @@ -7,6 +7,7 @@ import acolyte.jdbc.{ DefinedParameter => DParam, ParameterMetaData => ParamMeta import acolyte.jdbc.AcolyteDSL.{ connection, handleStatement, withQueryResult } import acolyte.jdbc.Implicits._ +import org.specs2.matcher.TypecheckMatchers._ import org.specs2.specification.core.Fragments import com.github.ghik.silencer.silent @@ -15,7 +16,9 @@ import Macro.ColumnNaming import SqlParser.scalar final class MacroSpec extends org.specs2.mutable.Specification { - "Macro" title + "Macro".title + + import TestUtils.typecheck val barRow1 = RowLists.rowList1(classOf[Int] -> "v") @@ -48,47 +51,61 @@ final class MacroSpec extends org.specs2.mutable.Specification { } "Generated named parser" should { - // No Column[Bar] so compilation error is expected - shapeless.test.illTyped("anorm.Macro.namedParser[Foo[Bar]]") - - // Not enough column names for class parameters - shapeless.test.illTyped("""anorm.Macro.parser[Foo[Int]]("Foo", "Bar")""") + "not be resolved" in { + // No Column[Bar] so compilation error is expected + (typecheck("anorm.Macro.namedParser[Foo[Bar]]") must failWith( + ".*cannot find.* .*Column.* nor .*RowParser.* for .*loremIpsum.*Bar.*" + )).and { + // Not enough column names for class parameters + typecheck("""anorm.Macro.parser[Foo[Int]]("Foo", "Bar")""") must failWith( + ".*no column name for parameters.* .*Foo.* .*Bar.*" + ) + } + } "be successful for Bar" in withQueryResult(barRow1 :+ 1 :+ 3) { implicit c => val parser1 = Macro.namedParser[Bar] val parser2 = Macro.parser[Bar]("v") - (SQL"TEST".as(parser1.*) must_=== List(Bar(1), Bar(3))).and(SQL"TEST".as(parser2.*) must_=== List(Bar(1), Bar(3))) + (SQL"TEST".as(parser1.*) must_=== List(Bar(1), Bar(3))).and { + SQL"TEST".as(parser2.*) must_=== List(Bar(1), Bar(3)) + } } "be successful for Foo[Int]" >> { def spec(parser1: RowParser[Foo[Int]], parser2: RowParser[Foo[Int]])(implicit c: Connection) = { val expected = List( - Foo(1.2f, "str1")(1, Some(2L))(Some(true)), - Foo(2.3f, "str2")(4, 
None)(None), - Foo(3.4f, "str3")(5, Some(3L))(None), - Foo(5.6f, "str4")(6, None)(Some(false)) + Foo(1.2F, "str1")(1, Some(2L))(Some(true)), + Foo(2.3F, "str2")(4, None)(None), + Foo(3.4F, "str3")(5, Some(3L))(None), + Foo(5.6F, "str4")(6, None)(Some(false)) ) - (SQL"TEST".as(parser1.*) must_=== expected).and(SQL("TEST").as(parser2.*) must_=== expected) + (SQL"TEST".as(parser1.*) must_=== expected).and { + SQL("TEST").as(parser2.*) must_=== expected + } } "using the default column naming" in withQueryResult( - fooRow1 :+ (1.2f, "str1", 1, 2L, true) :+ (2.3f, "str2", 4, - nullLong, nullBoolean) :+ (3.4f, "str3", - 5, 3L, nullBoolean) :+ (5.6f, "str4", 6, - nullLong, false) + fooRow1 + .append(1.2F, "str1", 1, 2L, true) + .append(2.3F, "str2", 4, nullLong, nullBoolean) + .append(3.4F, "str3", 5, 3L, nullBoolean) + .append(5.6F, "str4", 6, nullLong, false) ) { implicit con => - spec(Macro.namedParser[Foo[Int]], Macro.parser[Foo[Int]]("r", "bar", "loremIpsum", "opt", "x")) - + spec( + parser1 = Macro.namedParser[Foo[Int]], + parser2 = Macro.parser[Foo[Int]]("r", "bar", "loremIpsum", "opt", "x") + ) } "using the snake case naming" in withQueryResult( - fooRow2 :+ (1.2f, "str1", 1, 2L, true) :+ (2.3f, "str2", 4, - nullLong, nullBoolean) :+ (3.4f, "str3", - 5, 3L, nullBoolean) :+ (5.6f, "str4", 6, - nullLong, false) + fooRow2 + .append(1.2F, "str1", 1, 2L, true) + .append(2.3F, "str2", 4, nullLong, nullBoolean) + .append(3.4F, "str3", 5, 3L, nullBoolean) + .append(5.6F, "str4", 6, nullLong, false) ) { implicit con => spec( @@ -112,71 +129,84 @@ final class MacroSpec extends org.specs2.mutable.Specification { classOf[Int] -> "v" ) - withQueryResult(row :+ (1.2f, "str1", 1, 2L, true, 6)) { implicit c => - SQL"TEST".as(fooBar.singleOpt) must beSome(Foo(1.2f, "str1")(Bar(6), Some(2))(Some(true))) + withQueryResult(row.append(1.2F, "str1", 1, 2L, true, 6)) { implicit c => + SQL"TEST".as(fooBar.singleOpt) must beSome(Foo(1.2F, "str1")(Bar(6), Some(2))(Some(true))) } } "support self reference" in { - val _ = Macro.namedParser[Self] // check compile is ok + // check compile is ok + typecheck("Macro.namedParser[Self]") must not(beNull) - ok // TODO: Supports aliasing to make it really usable (see #124) + // TODO: Supports aliasing to make it really usable (see #124) } } "Generated indexed parser" should { - // No Column[Bar] so compilation error is expected - shapeless.test.illTyped("anorm.Macro.indexedParser[Foo[Bar]]") + "not be resolved" in { + // No Column[Bar] so compilation error is expected + typecheck("anorm.Macro.indexedParser[Foo[Bar]]") must failWith( + ".*cannot find .*Column.* nor .*RowParser.* .*loremIpsum.*Bar.*" + ) + } "be successful for Bar" in withQueryResult(RowLists.intList :+ 1 :+ 3) { implicit c => SQL"TEST".as(Macro.indexedParser[Bar].*) must_=== List(Bar(1), Bar(3)) } "be successful for Foo[Int]" in withQueryResult( - fooRow1 :+ (1.2f, "str1", 1, 2L, true) :+ (2.3f, "str2", 4, - nullLong, - nullBoolean) :+ (3.4f, "str3", 5, 3L, - nullBoolean) :+ (5.6f, "str4", 6, - nullLong, false) + fooRow1 + .append(1.2F, "str1", 1, 2L, true) + .append(2.3F, "str2", 4, nullLong, nullBoolean) + .append(3.4F, "str3", 5, 3L, nullBoolean) + .append(5.6F, "str4", 6, nullLong, false) ) { implicit con => val parser: RowParser[Foo[Int]] = Macro.indexedParser[Foo[Int]] SQL"TEST".as(parser.*) must_=== List( - Foo(1.2f, "str1")(1, Some(2L))(Some(true)), - Foo(2.3f, "str2")(4, None)(None), - Foo(3.4f, "str3")(5, Some(3L))(None), - Foo(5.6f, "str4")(6, None)(Some(false)) + Foo(1.2F, "str1")(1, 
Some(2L))(Some(true)), + Foo(2.3F, "str2")(4, None)(None), + Foo(3.4F, "str3")(5, Some(3L))(None), + Foo(5.6F, "str4")(6, None)(Some(false)) ) } } "Generated indexed parser (with an offset)" should { - // No Column[Bar] so compilation error is expected - shapeless.test.illTyped("anorm.Macro.offsetParser[Foo[Bar]]") + "not be resolved" in { + // No Column[Bar] so compilation error is expected + typecheck("Macro.offsetParser[Foo[Bar]](1)") must failWith( + ".*cannot find .*Column.* nor .*RowParser.* for loremIpsum.*Bar.* .*Foo.*" + ) + } "be successful for Bar" in withQueryResult(RowLists.intList :+ 1 :+ 3) { implicit c => SQL"TEST".as(Macro.offsetParser[Bar](0).*) must_=== List(Bar(1), Bar(3)) } "be successful for Foo[Int]" in withQueryResult( - fooRow1 :+ (1.2f, "str1", 1, 2L, true) :+ (2.3f, "str2", 4, - nullLong, - nullBoolean) :+ (3.4f, "str3", 5, 3L, - nullBoolean) :+ (5.6f, "str4", 6, - nullLong, false) + fooRow1 + .append(1.2F, "str1", 1, 2L, true) + .append(2.3F, "str2", 4, nullLong, nullBoolean) + .append(3.4F, "str3", 5, 3L, nullBoolean) + .append(5.6F, "str4", 6, nullLong, false) ) { implicit con => val parser: RowParser[Foo[Int]] = Macro.offsetParser[Foo[Int]](0) SQL"TEST".as(parser.*) must_=== List( - Foo(1.2f, "str1")(1, Some(2L))(Some(true)), - Foo(2.3f, "str2")(4, None)(None), - Foo(3.4f, "str3")(5, Some(3L))(None), - Foo(5.6f, "str4")(6, None)(Some(false)) + Foo(1.2F, "str1")(1, Some(2L))(Some(true)), + Foo(2.3F, "str2")(4, None)(None), + Foo(3.4F, "str3")(5, Some(3L))(None), + Foo(5.6F, "str4")(6, None)(Some(false)) ) } "be successful for Goo[T] with offset = 2" in withQueryResult( - fooRow1 :+ (1.2f, "str1", 1, 2L, true) :+ (2.3f, "str2", 4, nullLong, nullBoolean) :+ (3.4f, "str3", 5, 3L, nullBoolean) :+ (5.6f, "str4", 6, nullLong, false) + fooRow1 + .append(1.2F, "str1", 1, 2L, true) + .append(2.3F, "str2", 4, nullLong, nullBoolean) + .append(3.4F, "str3", 5, 3L, nullBoolean) + .append(5.6F, "str4", 6, nullLong, false) ) { implicit con => val parser: RowParser[Goo[Int]] = Macro.offsetParser[Goo[Int]](2) @@ -216,25 +246,29 @@ final class MacroSpec extends org.specs2.mutable.Specification { } "Sealed parser" should { - // No subclass - shapeless.test.illTyped("anorm.Macro.sealedParser[NoSubclass]") - - // Cannot find the RowParser instances for the subclasses, - // from the implicit scope - shapeless.test.illTyped("Macro.sealedParser[Family]") - - // No subclass - shapeless.test.illTyped("Macro.sealedParser[EmptyFamily]") + "not be resolved" in { + (typecheck("Macro.sealedParser[NoSubclass]") must failWith(".*cannot find any subclass.* .*NoSubclass.*")) + .and { + // Cannot find the RowParser instances for the subclasses, + // from the implicit scope + typecheck("Macro.sealedParser[Family]") must failWith(".*sealed.* .*Bar.* .*CaseObj.*") + } + .and { + // No subclass + typecheck("Macro.sealedParser[EmptyFamily]") must failWith(".*cannot find any subclass.* .*EmptyFamily.*") + } + } "be successful for the Family trait" >> { "with the default discrimination" in { val barRow2 = RowLists.rowList2(classOf[String] -> "classname", classOf[Int] -> "v") - withQueryResult(barRow2 :+ ("anorm.MacroSpec.Bar", 1) :+ ("anorm.MacroSpec.CaseObj", -1)) { implicit c => + withQueryResult(barRow2.append("anorm.MacroSpec.Bar", 1).append("anorm.MacroSpec.CaseObj", -1)) { implicit c => implicit val caseObjParser = RowParser[CaseObj.type] { _ => Success(CaseObj) } - implicit val barParser = Macro.namedParser[Bar] + implicit val barParser: RowParser[Bar] = + SqlParser.int("v").map { Bar(_) } // 
cannot handle object anorm.MacroSpec.NotCase: no case accessor @silent def familyParser = Macro.sealedParser[Family] @@ -246,11 +280,12 @@ final class MacroSpec extends org.specs2.mutable.Specification { "with a customized discrimination" in { val barRow2 = RowLists.rowList2(classOf[String] -> "foo", classOf[Int] -> "v") - withQueryResult(barRow2 :+ ("Bar", 1) :+ ("CaseObj", -1)) { implicit c => + withQueryResult(barRow2.append("Bar", 1).append("CaseObj", -1)) { implicit c => implicit val caseObjParser = RowParser[CaseObj.type] { _ => Success(CaseObj) } - implicit val barParser = Macro.namedParser[Bar] + implicit val barParser: RowParser[Bar] = + SqlParser.int("v").map { Bar(_) } // cannot handle object anorm.MacroSpec.NotCase: no case accessor @silent def familyParser = @@ -266,8 +301,11 @@ final class MacroSpec extends org.specs2.mutable.Specification { import Macro.{ ParameterProjection => proj } import NamedParameter.{ namedWithString => named } - // No ToParameterList[Bar] so compilation error is expected - shapeless.test.illTyped("anorm.Macro.toParameters[Goo[Bar]]") + "not be resolved" in { + typecheck("Macro.toParameters[Goo[Bar]]") must failWith( + ".*cannot find either .*ToParameterList.* or .*ToStatement.* for .*Bar.*" + ) + } "be successful for Bar" >> { val fixture = Bar(1) @@ -277,10 +315,11 @@ final class MacroSpec extends org.specs2.mutable.Specification { Macro.toParameters[Bar]() -> List(named("v" -> 1)), Macro.toParameters[Bar](proj("v", "w")) -> List(named("w" -> 1)) ).zipWithIndex - ) { case ((encoder, params), index) => - s"using encoder #${index}" in { - encoder(fixture) must_=== params - } + ) { + case ((encoder, params), index) => + s"using encoder #${index}" in { + encoder(fixture) must_=== params + } } } @@ -296,10 +335,11 @@ final class MacroSpec extends org.specs2.mutable.Specification { ), Macro.toParameters[Goo[Int]](proj("loremIpsum", "value")) -> List(named("value" -> 1)) ).zipWithIndex - ) { case ((encoder, params), index) => - s"using encoder #${index}" in { - encoder(fixture) must_=== params - } + ) { + case ((encoder, params), index) => + s"using encoder #${index}" in { + encoder(fixture) must_=== params + } } } @@ -314,10 +354,11 @@ final class MacroSpec extends org.specs2.mutable.Specification { Fragments.foreach( Seq[(Family, List[NamedParameter])](Bar(1) -> List(named("v" -> 1)), CaseObj -> List.empty[NamedParameter]) - ) { case (i, params) => - s"for $i" in { - ToParameterList.from(i) must_=== params - } + ) { + case (i, params) => + s"for $i" in { + ToParameterList.from(i) must_=== params + } } } @@ -339,36 +380,41 @@ final class MacroSpec extends org.specs2.mutable.Specification { named("x" -> Some(false)) ) ).zipWithIndex - ) { case ((encoder, params), index) => - s"using encoder #${index}" in { - encoder(fixture) must_=== params - } + ) { + case ((encoder, params), index) => + s"using encoder #${index}" in { + encoder(fixture) must_=== params + } } } } "Generated column" should { - shapeless.test.illTyped("anorm.Macro.valueColumn[Bar]") // case class - - shapeless.test.illTyped("anorm.Macro.valueColumn[InvalidValueClass]") + "not be resolved" in { + (typecheck("Macro.valueColumn[Bar]") must failWith(".*AnyVal.*")).and { + typecheck("Macro.valueColumn[InvalidValueClass]") must failWith(".*MacroSpec.*") + } + } "be generated for a supported ValueClass" in { implicit val generated: Column[ValidValueClass] = Macro.valueColumn[ValidValueClass] - withQueryResult(RowLists.doubleList :+ 1.2d) { implicit con => - SQL("SELECT 
d").as(scalar[ValidValueClass].single).aka("parsed column") must_=== new ValidValueClass(1.2d) + withQueryResult(RowLists.doubleList :+ 1.2D) { implicit con => + SQL("SELECT d").as(scalar[ValidValueClass].single).aka("parsed column") must_=== new ValidValueClass(1.2D) } } } "ToStatement" should { - shapeless.test.illTyped("anorm.Macro.valueToStatement[Bar]") // case class + "not be resolved" in { + typecheck("Macro.valueToStatement[Bar]") must failWith(".*AnyVal.*") + } - val SqlDouble3s = ParamMeta.Double(23.456d) + val SqlDouble3s = ParamMeta.Double(23.456D) def withConnection[A](f: java.sql.Connection => A): A = f(connection(handleStatement.withUpdateHandler { - case UpdateExecution("set-double ?", DParam(23.456d, SqlDouble3s) :: Nil) => 1 /* case ok */ + case UpdateExecution("set-double ?", DParam(23.456D, SqlDouble3s) :: Nil) => 1 /* case ok */ case x => sys.error(s"Unexpected: $x") })) @@ -378,7 +424,7 @@ final class MacroSpec extends org.specs2.mutable.Specification { Macro.valueToStatement[ValidValueClass] withConnection { implicit c => - SQL("set-double {p}").on("p" -> new ValidValueClass(23.456d)).execute() must beFalse + SQL("set-double {p}").on("p" -> new ValidValueClass(23.456D)).execute() must beFalse } } } diff --git a/core/src/test/scala/anorm/MetaDataSpec.scala b/core/src/test/scala/anorm/MetaDataSpec.scala index 0ed190c9..52dca36f 100644 --- a/core/src/test/scala/anorm/MetaDataSpec.scala +++ b/core/src/test/scala/anorm/MetaDataSpec.scala @@ -3,8 +3,8 @@ package anorm import acolyte.jdbc.Implicits._ import acolyte.jdbc.RowLists.rowList3 -class MetaDataSpec extends org.specs2.mutable.Specification { - "Meta-data" title +final class MetaDataSpec extends org.specs2.mutable.Specification { + "Meta-data".title "Meta-data" should { "support column aliases" in { @@ -23,7 +23,8 @@ class MetaDataSpec extends org.specs2.mutable.Specification { "be parsed from resultset" >> { import scala.language.existentials - @inline def rs = (fooBarTable :+ (1L, "lorem", 3)).getRowList.resultSet() + + @inline def rs = fooBarTable.append(1L, "lorem", 3).getRowList.resultSet() val item1 = MetaDataItem(ColumnName(".id", Some("id")), false, "long") diff --git a/core/src/test/scala/anorm/ParameterMetaDataSpec.scala b/core/src/test/scala/anorm/ParameterMetaDataSpec.scala index abad327e..8cab2a6b 100644 --- a/core/src/test/scala/anorm/ParameterMetaDataSpec.scala +++ b/core/src/test/scala/anorm/ParameterMetaDataSpec.scala @@ -18,11 +18,17 @@ import java.lang.{ } import java.sql.Timestamp -class ParameterMetaDataSpec extends org.specs2.mutable.Specification { - "Parameter metadata" title +import org.specs2.matcher.TypecheckMatchers._ + +final class ParameterMetaDataSpec extends org.specs2.mutable.Specification { + "Parameter metadata".title + + import TestUtils.typecheck "Metadata" should { - shapeless.test.illTyped("implicitly[ParameterMetaData[Any]]") + "not be resolved" in { + typecheck("implicitly[ParameterMetaData[Any]]") must failWith(".*not\\ found.*") + } "be provided for parameter" >> { "of type Array of Byte" in { diff --git a/core/src/test/scala/anorm/RowSpec.scala b/core/src/test/scala/anorm/RowSpec.scala index 0f4855da..68cc892d 100644 --- a/core/src/test/scala/anorm/RowSpec.scala +++ b/core/src/test/scala/anorm/RowSpec.scala @@ -1,35 +1,40 @@ package anorm +import java.sql.Connection + import scala.util.Try import acolyte.jdbc.AcolyteDSL.withQueryResult import acolyte.jdbc.Implicits._ import acolyte.jdbc.RowLists.{ rowList1, rowList2, stringList } -class RowSpec extends 
org.specs2.mutable.Specification { - "Row" title +final class RowSpec extends org.specs2.mutable.Specification { + "Row".title "List of column values" should { - "be expected one" in withQueryResult(rowList2(classOf[String] -> "foo", classOf[Int] -> "bar") :+ ("row1", 100)) { - implicit c => - SQL("SELECT * FROM test").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List( - "row1", - 100 - ) + "be expected one" in withQueryResult( + rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100) + ) { implicit c: Connection => + SQL("SELECT * FROM test").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List( + "row1", + 100 + ) } - "keep null if not nullable" in withQueryResult(stringList :+ null) { implicit c => - SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List(null) + "keep null if not nullable" in withQueryResult(stringList :+ null) { implicit c: Connection => + SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single)(c).aka("column list") must_=== List(null) } - "turn null into None if nullable" in withQueryResult(stringList.withNullable(1, true) :+ null) { implicit c => - SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List(None) + "turn null into None if nullable" in withQueryResult(stringList.withNullable(1, true) :+ null) { + implicit c: Connection => + SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List(None) } - "turn value into Some(X) if nullable" in withQueryResult(stringList.withNullable(1, true) :+ "str") { implicit c => - SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List(Some("str")) + "turn value into Some(X) if nullable" in withQueryResult(stringList.withNullable(1, true) :+ "str") { + implicit c: Connection => + SQL("SELECT 1").as(RowParser(r => Success(r.asList)).single).aka("column list") must_=== List(Some("str")) } @@ -39,6 +44,7 @@ class RowSpec extends org.specs2.mutable.Specification { val meta2 = MetaDataItem(ColumnName("table.id", Some("second_id")), false, "java.lang.Integer") val metaData = MetaData(List(meta1, meta2)) val row = ResultRow(metaData, List(1, 2)) + row.get("second_id") must beRight((2, meta2)) } @@ -48,58 +54,63 @@ class RowSpec extends org.specs2.mutable.Specification { val meta2 = MetaDataItem(ColumnName("data.name", Some("CorrectAlias")), false, "java.lang.String") val metaData = MetaData(List(meta1, meta2)) val row = ResultRow(metaData, List("IncorrectString", "CorrectString")) + row.get("CorrectAlias") must beRight(("CorrectString", meta2)) } } "Column dictionary" should { - "be expected one" in withQueryResult(rowList2(classOf[String] -> "foo", classOf[Int] -> "bar") :+ ("row1", 100)) { - implicit c => - SQL("SELECT * FROM test").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map( - ".foo" -> "row1", - ".bar" -> 100 - ) + "be expected one" in withQueryResult( + rowList2(classOf[String] -> "foo", classOf[Int] -> "bar").append("row1", 100) + ) { implicit c: Connection => + SQL("SELECT * FROM test").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map( + ".foo" -> "row1", + ".bar" -> 100 + ) } - "keep null if not nullable" in withQueryResult(rowList1(classOf[String] -> "foo") :+ null) { implicit c => - SQL("SELECT 1").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map(".foo" -> null) + "keep null if not nullable" in 
withQueryResult(rowList1(classOf[String] -> "foo") :+ null) { + implicit c: Connection => + SQL("SELECT 1").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map(".foo" -> null) } "turn null into None if nullable" in withQueryResult( rowList1(classOf[String] -> "foo").withNullable(1, true) :+ null - ) { implicit c => + ) { implicit c: Connection => SQL("SELECT 1").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map(".foo" -> None) } "turn value into Some(X) if nullable" in withQueryResult( rowList1(classOf[String] -> "foo").withNullable(1, true) :+ "str" - ) { implicit c => + ) { implicit c: Connection => SQL("SELECT 1").as(RowParser(r => Success(r.asMap)).single).aka("column map") must_=== Map(".foo" -> Some("str")) } } "Column" should { - "be extracted by name" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "byName") { implicit c => - SQL("SELECT *").as(RowParser(r => Success(r[String]("foo"))).single).aka("column by name") must_=== "byName" + "be extracted by name" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "byName") { + implicit c: Connection => + SQL("SELECT *").as(RowParser(r => Success(r[String]("foo"))).single).aka("column by name") must_=== "byName" } - "be extracted by position" in withQueryResult(stringList :+ "byPos") { implicit c => + "be extracted by position" in withQueryResult(stringList :+ "byPos") { implicit c: Connection => SQL("SELECT *").as(RowParser(r => Success(r[String](1))).single).aka("column by name") must_=== "byPos" } } "Row" should { "successfully be parsed" in withQueryResult( - rowList2(classOf[String] -> "foo", classOf[Int] -> "num") :+ ("str", 2) - ) { implicit c => + rowList2(classOf[String] -> "foo", classOf[Int] -> "num").append("str", 2) + ) { implicit c: Connection => SQL"SELECT *" - .withResult(_.map(_.row.as((SqlParser.str("foo") ~ SqlParser.int(2)).map { case a ~ b => - b -> a + .withResult(_.map(_.row.as((SqlParser.str("foo") ~ SqlParser.int(2)).map { + case a ~ b => + b -> a }))) .aka("streaming result") must beRight[Option[Try[(Int, String)]]].which { _.aka("first row") must beSome[Try[(Int, String)]].which { diff --git a/core/src/test/scala/anorm/SqlRequestErrorSpec.scala b/core/src/test/scala/anorm/SqlRequestErrorSpec.scala index 50e76793..d3a95cf4 100644 --- a/core/src/test/scala/anorm/SqlRequestErrorSpec.scala +++ b/core/src/test/scala/anorm/SqlRequestErrorSpec.scala @@ -1,7 +1,7 @@ package anorm class SqlRequestErrorSpec extends org.specs2.mutable.Specification { - "SQL request error" title + "SQL request error".title "ColumnNotFound" should { "be converted to Failure" in { diff --git a/core/src/test/scala/anorm/SqlResultSpec.scala b/core/src/test/scala/anorm/SqlResultSpec.scala index 13c98d3f..edc98163 100644 --- a/core/src/test/scala/anorm/SqlResultSpec.scala +++ b/core/src/test/scala/anorm/SqlResultSpec.scala @@ -1,5 +1,11 @@ package anorm +import java.io.Closeable + +import java.sql.Connection + +import scala.reflect.ClassTag + import acolyte.jdbc.AcolyteDSL.{ connection, handleQuery, withQueryResult } import acolyte.jdbc.Implicits._ import acolyte.jdbc.QueryResult @@ -7,10 +13,16 @@ import acolyte.jdbc.RowLists.{ rowList1, rowList2, stringList } final class SqlResultSpec extends org.specs2.mutable.Specification with H2Database { - "SQL result" title + "SQL result".title + + object Resources { + val closeableTag = implicitly[ClassTag[Closeable]] + } + + private implicit val tag: ClassTag[Closeable] = Resources.closeableTag - "For-comprehension over result" should { - 
"fail when there is no data" in withQueryResult("scalar") { implicit c => + "For-comprehension/map over result" should { + "fail when there is no data" in withQueryResult("scalar") { implicit c: Connection => lazy val parser = for { a <- SqlParser.str("col1") b <- SqlParser.int("col2") @@ -20,8 +32,8 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } "return expected mandatory single result" in withQueryResult( - rowList2(classOf[String] -> "a", classOf[Int] -> "b") :+ ("str", 2) - ) { implicit c => + rowList2(classOf[String] -> "a", classOf[Int] -> "b").append("str", 2) + ) { implicit c: Connection => lazy val parser = for { a <- SqlParser.str("a") b <- SqlParser.int("b") @@ -31,7 +43,7 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } "fail with sub-parser when there is no data" >> { - "by throwing exception" in withQueryResult("scalar") { implicit c => + "by throwing exception" in withQueryResult("scalar") { implicit c: Connection => lazy val sub = for { b <- SqlParser.str("b") c <- SqlParser.int("c") @@ -45,7 +57,7 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa SQL("SELECT * FROM test").as(parser.single) must throwA[Exception](message = "'col1' not found") } - "with captured failure" in withQueryResult("scalar") { implicit c => + "with captured failure" in withQueryResult("scalar") { implicit c: Connection => lazy val sub = for { b <- SqlParser.str("b") c <- SqlParser.int("c") @@ -63,7 +75,7 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } "fail when column is missing for sub-parser" in withQueryResult(rowList1(classOf[String] -> "a") :+ "str") { - implicit c => + implicit c: Connection => lazy val sub = for { b <- SqlParser.str("col2") c <- SqlParser.int("col3") @@ -77,56 +89,71 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa SQL("SELECT * FROM test").as(parser.single) must throwA[Exception](message = "'col2' not found") } - "return None from optional sub-parser" in withQueryResult(rowList1(classOf[String] -> "a") :+ "str") { implicit c => - lazy val sub = for { - b <- SqlParser.str("b") - c <- SqlParser.int("c") - } yield b -> c + "return None from optional sub-parser" in withQueryResult(rowList1(classOf[String] -> "a") :+ "str") { + implicit c: Connection => + lazy val sub = for { + b <- SqlParser.str("b") + c <- SqlParser.int("c") + } yield b -> c - lazy val parser = for { - a <- SqlParser.str("a") - bc <- sub.? - } yield a -> bc + lazy val parser = for { + a <- SqlParser.str("a") + bc <- sub.? 
+ } yield a -> bc - SQL("SELECT * FROM test").as(parser.single) must_=== ("str" -> None) + SQL("SELECT * FROM test").as(parser.single) must_=== ("str" -> None) + } + } + + "Successful result" in { + "be collected" in { + val suc = anorm.Success("foo") + + (suc.collect { + case "foo" => 1 + } must_=== anorm.Success(1)).and { + suc.collect { + case "bar" => 1 + } must_=== anorm.Error(SqlMappingError("Value foo is not matching")) + } } } "Column" should { - "be found in result" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { implicit c => - SQL("SELECT f").as(SqlParser.matches("f", 1.2f).single) must beTrue + "be found in result" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("f", 1.2F).single) must beTrue } - "not be found in result when value not matching" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { - implicit c => - SQL("SELECT f").as(SqlParser.matches("f", 2.34f).single) must beFalse + "not be found in result when value not matching" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { + implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("f", 2.34F).single) must beFalse } - "not be found in result when column missing" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { - implicit c => - SQL("SELECT f").as(SqlParser.matches("x", 1.2f).single) must beFalse + "not be found in result when column missing" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { + implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("x", 1.2F).single) must beFalse } - "be matching in result" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { implicit c => - SQL("SELECT f").as(SqlParser.matches("f", 1.2f).single) must beTrue + "be matching in result" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("f", 1.2F).single) must beTrue } - "not be found in result when value not matching" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { - implicit c => - SQL("SELECT f").as(SqlParser.matches("f", 2.34f).single) must beFalse + "not be found in result when value not matching" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { + implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("f", 2.34F).single) must beFalse } - "not be found in result when column missing" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2f) { - implicit c => - SQL("SELECT f").as(SqlParser.matches("x", 1.2f).single) must beFalse + "not be found in result when column missing" in withQueryResult(rowList1(classOf[Float] -> "f") :+ 1.2F) { + implicit c: Connection => + SQL("SELECT f").as(SqlParser.matches("x", 1.2F).single) must beFalse } - "be None when missing" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "bar") { implicit c => + "be None when missing" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "bar") { implicit c: Connection => SQL"SELECT *".as(SqlParser.str("lorem").?.single).aka("result") must beNone } "be None when NULL" in withQueryResult(rowList1(classOf[String] -> "foo") :+ null.asInstanceOf[String]) { - implicit c => + implicit c: Connection => SQL"SELECT *".as(SqlParser.str("foo").?.single) must beNone } @@ -141,15 +168,15 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa case "XB" => Xb } - "return Xa object" in withQueryResult(stringList :+ "XA") { implicit c => + "return Xa object" in 
withQueryResult(stringList :+ "XA") { implicit c: Connection => SQL"SELECT str".as(SqlParser.str(1).collect("ERR")(pf).single).aka("collected") must_=== Xa } - "return Xb object" in withQueryResult(stringList :+ "XB") { implicit c => + "return Xb object" in withQueryResult(stringList :+ "XB") { implicit c: Connection => SQL"SELECT str".as(SqlParser.str(1).collect("ERR")(pf).single).aka("collected") must_=== Xb } - "fail" in withQueryResult(stringList :+ "XC") { implicit c => + "fail" in withQueryResult(stringList :+ "XC") { implicit c: Connection => SQL"SELECT str".as(SqlParser.str(1).collect("ERR")(pf).single).aka("collecting") must throwA[Exception]( "SqlMappingError\\(ERR\\)" ) @@ -157,7 +184,7 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } "Aggregation over all rows" should { - "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => + "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c: Connection => val res: SqlQueryResult = SQL"SELECT str".executeQuery() var closed = false @@ -168,17 +195,20 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa // .fold w/o ColumnAliaser is deprecated @com.github.ghik.silencer.silent - def agg = sqlResult.fold(List[Int]()) { (l, _) => + def agg = sqlResult.fold(List[Int](), ColumnAliaser.empty) { (l, _) => i = i + 1; l :+ i } - (agg.aka("aggregation") must beRight(List(1, 2, 3))) - .and(closed.aka("resource release") must beTrue) - .and(i.aka("row count") must_=== 3) - + agg.aka("aggregation") must beRight[List[Int]].which { + _ must_=== List(1, 2, 3) and { + closed.aka("resource release") must beTrue + } and { + i.aka("row count") must_=== 3 + } + } } - "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => + "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c: Connection => val res: SqlQueryResult = SQL"SELECT str".executeQuery() var closed = false @@ -190,15 +220,18 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa i = i + 1; l :+ i } - (agg.aka("aggregation") must beRight(List(1, 2, 3))) - .and(closed.aka("resource release") must beTrue) - .and(i.aka("row count") must_=== 3) - + agg.aka("aggregation") must beRight[List[Int]].which { + _ must_=== List(1, 2, 3) and { + closed.aka("resource release") must beTrue + } and { + i.aka("row count") must_=== 3 + } + } } "release resources on exception (with degraded result set)" in { queryResultAndOptions(stringList :+ "A" :+ "B" :+ "C", List("acolyte.resultSet.initOnFirstRow" -> "true")) { - implicit c => + implicit c: Connection => val res: SqlQueryResult = SQL"SELECT str".withResultSetOnFirstRow(true).executeQuery() @@ -212,8 +245,9 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa if (i == 1) sys.error("Unexpected") else { i = i + 1; l :+ i } } - (agg.aka("aggregation") must beLike { case Left(err :: Nil) => - err.getMessage.aka("failure") must_=== "Unexpected" + (agg.aka("aggregation") must beLike { + case Left(err :: Nil) => + err.getMessage.aka("failure") must_=== "Unexpected" }).and(closed.aka("resource release") must beTrue).and(i.aka("row count") must_=== 1) } @@ -221,26 +255,30 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } "Aggregation over variable number of rows" should { - "support user alias in fold" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => + "support user alias 
in fold" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c: Connection => val parser = SqlParser.str("foo") SQL"SELECT str".executeQuery().fold(List.empty[String], ColumnAliaser.withPattern(Set(1), "foo")) { (ls, row) => parser(row).fold(_ => ls, _ :: ls) - } must beRight(List("C", "B", "A")) + } must beRight[List[String]].which { + _ must_=== List("C", "B", "A") + } } "support user alias in foldWhile" in withQueryResult(rowList1(classOf[String] -> "bar") :+ "A" :+ "B" :+ "C") { - implicit c => + implicit c: Connection => val parser = SqlParser.str("foo.bar.lorem") SQL"SELECT str" .executeQuery() .foldWhile(List.empty[String], ColumnAliaser.withPattern(Set(1), "foo.", ".lorem")) { (ls, row) => parser(row).fold(_ => ls, _ :: ls) -> true - } must beRight(List("C", "B", "A")) + } must beRight[List[String]].which { + _ must_=== List("C", "B", "A") + } } - "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => + "release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c: Connection => val res: SqlQueryResult = SQL"SELECT str".executeQuery() var closed = false @@ -249,16 +287,20 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa var i = 0 lazy val agg = res.copy(resultSet = res.resultSet.and(probe).map(_._1)).foldWhile(List.empty[Int], ColumnAliaser.empty) { - (l, _) => i = i + 1; (l :+ i) -> true + (l, _) => + i = i + 1; (l :+ i) -> true } - (agg.aka("aggregation") must beRight(List(1, 2, 3))) - .and(closed.aka("resource release") must beTrue) - .and(i.aka("row count") must_=== 3) - + agg.aka("aggregation") must beRight[List[Int]].which { + _ must_=== List(1, 2, 3) and { + closed.aka("resource release") must beTrue + } and { + i.aka("row count") must_=== 3 + } + } } - "release resources on exception" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => + "release resources on exception" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c: Connection => val res: SqlQueryResult = SQL"SELECT str".executeQuery() var closed = false @@ -274,29 +316,34 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } } - (agg.aka("aggregation") must beLike { case Left(err :: Nil) => - err.getMessage.aka("failure") must_=== "Unexpected" + (agg.aka("aggregation") must beLike { + case Left(err :: Nil) => + err.getMessage.aka("failure") must_=== "Unexpected" }).and(closed.aka("resource release") must beTrue).and(i.aka("row count") must_=== 1) } - "stop after second row & release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { implicit c => - - val res: SqlQueryResult = SQL"SELECT str".executeQuery() - var closed = false - val probe = resource.managed(new java.io.Closeable { def close() = closed = true }) + "stop after second row & release resources" in withQueryResult(stringList :+ "A" :+ "B" :+ "C") { + implicit c: Connection => - var i = 0 - lazy val agg = - res.copy(resultSet = res.resultSet.and(probe).map(_._1)).foldWhile(List.empty[Int], ColumnAliaser.empty) { - (l, _) => - if (i == 2) (l, false) else { i = i + 1; (l :+ i) -> true } - } + val res: SqlQueryResult = SQL"SELECT str".executeQuery() + var closed = false + val probe = resource.managed(new java.io.Closeable { def close() = closed = true }) - (agg.aka("aggregation") must beRight(List(1, 2))) - .and(closed.aka("resource release") must beTrue) - .and(i.aka("row count") must_=== 2) + var i = 0 + lazy val agg = + res.copy(resultSet = 
res.resultSet.and(probe).map(_._1)).foldWhile(List.empty[Int], ColumnAliaser.empty) { + (l, _) => + if (i == 2) (l, false) else { i = i + 1; (l :+ i) -> true } + } + agg.aka("aggregation") must beRight[List[Int]].which { + _ must_=== List(1, 2) and { + closed.aka("resource release") must beTrue + } and { + i.aka("row count") must_=== 2 + } + } } } @@ -307,13 +354,13 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa case _ => l } - "do nothing when there is no result" in withQueryResult(QueryResult.Nil) { implicit c => - SQL"EXEC test".executeQuery().withResult(go(_)).aka("iteration") must beRight.which( + "do nothing when there is no result" in withQueryResult(QueryResult.Nil) { implicit c: Connection => + SQL"EXEC test".executeQuery().withResult(go(_)).aka("iteration") must beRight[List[Row]].which { _.aka("result list") must beEmpty - ) + } } - "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c => + "handle failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { implicit c: Connection => var first = false (SQL"SELECT str" .executeQuery() @@ -322,34 +369,35 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa first = true; sys.error("Failure") case _ => sys.error("Unexpected") } - .aka("processing with failure") must beLeft.like { case err :: Nil => - err.getMessage.aka("failure") must_=== "Failure" + .aka("processing with failure") must beLeft.like { + case err :: Nil => + err.getMessage.aka("failure") must_=== "Failure" }).and(first.aka("first read") must beTrue) } "stop after first row without failure" in withQueryResult(rowList1(classOf[String] -> "foo") :+ "A" :+ "B") { - implicit c => + implicit c: Connection => SQL"SELECT str" .executeQuery() .withResult { case Some(first) => Set(first.row[String]("foo")) case _ => Set.empty[String] } - .aka("partial processing") must beRight.which { r => - r must_=== Set("A") + .aka("partial processing") must beRight[Set[String]].which { + _ must_=== Set("A") } } } "SQL warning" should { - "not be there on success" in withQueryResult(stringList :+ "A") { implicit c => + "not be there on success" in withQueryResult(stringList :+ "A") { implicit c: Connection => SQL"SELECT str".executeQuery().statementWarning.aka("statement warning") must beNone } "be handled from executed query" in withQueryResult(QueryResult.Nil.withWarning("Warning for test-proc-2")) { - implicit c => + implicit c: Connection => SQL("EXEC stored_proc({param})") .on("param" -> "test-proc-2") @@ -365,19 +413,19 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa val foo = s"alias-${System.identityHashCode(this)}" val (v1, v2) = (s"1-$foo", s"2-$foo") - "be found by name" in withTestDB(v1) { implicit c => + "be found by name" in withTestDB(v1) { implicit c: Connection => SQL"SELECT foo AS AL, bar FROM test1".as(SqlParser.str("foo").single).aka("by name") must_=== v1 } - "be found by alias" in withTestDB(v2) { implicit c => + "be found by alias" in withTestDB(v2) { implicit c: Connection => (SQL"SELECT foo AS AL, bar FROM test1".as(SqlParser.str("foo").single).aka("by name") must_=== v2) .and(SQL"SELECT foo AS AL, bar FROM test1".as(SqlParser.str("AL").single).aka("by alias") must_=== v2) } "be found by alias when column name is duplicated" in { - withH2Database { implicit c => + withH2Database { implicit c: Connection => createTest1Table() val id1 = System.identityHashCode(c).toLong @@ -393,7 +441,7 @@ final class 
SqlResultSpec extends org.specs2.mutable.Specification with H2Databa } } - "be found by user alias" in withTestDB(v2) { implicit c => + "be found by user alias" in withTestDB(v2) { implicit c: Connection => (SQL"SELECT foo AS AL, bar FROM test1" .asTry(SqlParser.str("pre.AL").single, ColumnAliaser.withPattern1("pre.")(1)) .aka("by user alias") must beSuccessfulTry(v2)).and( @@ -408,7 +456,7 @@ final class SqlResultSpec extends org.specs2.mutable.Specification with H2Databa // --- def withTestDB[T](foo: String)(f: java.sql.Connection => T): T = - withH2Database { implicit c => + withH2Database { implicit c: Connection => createTest1Table() SQL("insert into test1(id, foo, bar) values ({id}, {foo}, {bar})") diff --git a/core/src/test/scala/anorm/StatementParserSpec.scala b/core/src/test/scala/anorm/StatementParserSpec.scala index 56bd9399..3757d981 100644 --- a/core/src/test/scala/anorm/StatementParserSpec.scala +++ b/core/src/test/scala/anorm/StatementParserSpec.scala @@ -5,8 +5,8 @@ import acolyte.jdbc.AcolyteDSL.{ connection, handleQuery, withQueryResult } import acolyte.jdbc.Implicits._ import acolyte.jdbc.RowLists.stringList -class StatementParserSpec extends org.specs2.mutable.Specification { - "SQL statement parser" title +final class StatementParserSpec extends org.specs2.mutable.Specification { + "SQL statement parser".title "Statement" should { "be parsed with 'name' and 'cat' parameters and support multiple lines" in { @@ -142,35 +142,38 @@ class StatementParserSpec extends org.specs2.mutable.Specification { 0, new StringBuilder(), List.empty[(Int, ParameterValue)] - ) must beSuccessfulTry.like { case prepared1 => - (prepared1._2.aka("parameters #1") must_=== List[(Int, ParameterValue)](0 -> List("a", "b"), 2 -> 3)) - .and { - Sql.query( - stmt2.tokens, - stmt2.names.toList, - Map[String, ParameterValue]("cs_1" -> "a", "cs_2" -> "b", "id" -> 3), - 0, - new StringBuilder(), - List.empty[(Int, ParameterValue)] - ) must beSuccessfulTry.like { case prepared2 => - (prepared1._1.aka("sql") must_=== prepared2._1).and( - prepared2._2.aka("parameters #2") must_=== (List[(Int, ParameterValue)](0 -> "a", 1 -> "b", 2 -> 3)) - ) + ) must beSuccessfulTry.like { + case prepared1 => + (prepared1._2.aka("parameters #1") must_=== List[(Int, ParameterValue)](0 -> List("a", "b"), 2 -> 3)) + .and { + Sql.query( + stmt2.tokens, + stmt2.names.toList, + Map[String, ParameterValue]("cs_1" -> "a", "cs_2" -> "b", "id" -> 3), + 0, + new StringBuilder(), + List.empty[(Int, ParameterValue)] + ) must beSuccessfulTry.like { + case prepared2 => + (prepared1._1.aka("sql") must_=== prepared2._1).and( + prepared2._2.aka("parameters #2") must_=== (List[(Int, ParameterValue)](0 -> "a", 1 -> "b", 2 -> 3)) + ) + } } - } - .and { - Sql.query( - stmt3.tokens, - stmt3.names.toList, - Map[String, ParameterValue]("cs" -> List("a", "b"), "id" -> 3), - 0, - new StringBuilder(), - List.empty[(Int, ParameterValue)] - ) must beSuccessfulTry.like { case prepared3 => - (prepared3._1.aka("sql") must_=== prepared1._1) - .and(prepared3._2.aka("parameters #3") must_=== prepared1._2) + .and { + Sql.query( + stmt3.tokens, + stmt3.names.toList, + Map[String, ParameterValue]("cs" -> List("a", "b"), "id" -> 3), + 0, + new StringBuilder(), + List.empty[(Int, ParameterValue)] + ) must beSuccessfulTry.like { + case prepared3 => + (prepared3._1.aka("sql") must_=== prepared1._1) + .and(prepared3._2.aka("parameters #3") must_=== prepared1._2) + } } - } } } @@ -199,8 +202,9 @@ class StatementParserSpec extends org.specs2.mutable.Specification { 0, 
new StringBuilder(), List.empty[(Int, ParameterValue)] - ) must beFailedTry.like { case err: Sql.MissingParameter => - err.getMessage must startWith("""Missing parameter value for 'id' after: "SELECT""") + ) must beFailedTry.like { + case err: Sql.MissingParameter => + err.getMessage must startWith("""Missing parameter value for 'id' after: "SELECT""") } } @@ -212,10 +216,11 @@ class StatementParserSpec extends org.specs2.mutable.Specification { 0, new StringBuilder(), List.empty[(Int, ParameterValue)] - ) must beSuccessfulTry.like { case (sql, (0, pv) :: Nil) => - (sql must_=== "SELECT * FROM name LIKE '%strange' AND id = ?").and( - pv must_=== ParameterValue.toParameterValue("foo") - ) + ) must beSuccessfulTry.like { + case (sql, (0, pv) :: Nil) => + (sql must_=== "SELECT * FROM name LIKE '%strange' AND id = ?").and( + pv must_=== ParameterValue.from("foo") + ) } } } @@ -252,7 +257,7 @@ class StatementParserSpec extends org.specs2.mutable.Specification { val cmd = "SELECT" val clause = "FROM" val table = "Test" - implicit val con = connection(handleQuery { + implicit val con: java.sql.Connection = connection(handleQuery { case QueryExecution( "SELECT * FROM Test WHERE id = ? AND code IN (?, ?)", DParam("id1", ParamMeta.Str) :: DParam(2, ParamMeta.Int) :: diff --git a/core/src/test/scala/anorm/TupleFlattenerSpec.scala b/core/src/test/scala/anorm/TupleFlattenerSpec.scala index ed2d41f1..299b3305 100644 --- a/core/src/test/scala/anorm/TupleFlattenerSpec.scala +++ b/core/src/test/scala/anorm/TupleFlattenerSpec.scala @@ -6,16 +6,16 @@ import acolyte.jdbc.RowLists._ import SqlParser.{ bool, str, int, long, get } -class TupleFlattenerSpec extends org.specs2.mutable.Specification { - "Tuple flattener" title +final class TupleFlattenerSpec extends org.specs2.mutable.Specification { + "Tuple flattener".title "Raw tuple-like" should { "be flatten from 2 columns to Tuple2" in { val schema = rowList2(classOf[String] -> "A", classOf[Int] -> "B") - withQueryResult(schema :+ ("A", 2)) { implicit c => + withQueryResult(schema.append("A", 2)) { implicit c => SQL("SELECT * FROM test") - .as((str("A") ~ int("B")).map(SqlParser.flatten) single) + .as((str("A") ~ int("B")).map(SqlParser.flatten).single) .aka("flatten columns") must_=== Tuple2("A", 2) } @@ -24,9 +24,9 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "be flatten from 3 columns to Tuple3" in { val schema = rowList3(classOf[String] -> "A", classOf[Int] -> "B", classOf[Long] -> "C") - withQueryResult(schema :+ ("A", 2, 3L)) { implicit c => + withQueryResult(schema.append("A", 2, 3L)) { implicit c => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C")).map(SqlParser.flatten) single) + .as((str("A") ~ int("B") ~ long("C")).map(SqlParser.flatten).single) .aka("flatten columns") must_=== Tuple3("A", 2, 3L) } @@ -35,10 +35,10 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "be flatten from 4 columns to Tuple4" in { val schema = rowList4(classOf[String] -> "A", classOf[Int] -> "B", classOf[Long] -> "C", classOf[Double] -> "D") - withQueryResult(schema :+ ("A", 2, 3L, 4.56d)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D)) { implicit c => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D")).map(SqlParser.flatten) single) - .aka("flatten columns") must_=== Tuple4("A", 2, 3L, 4.56d) + .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D")).map(SqlParser.flatten).single) + .aka("flatten columns") must_=== Tuple4("A", 2, 3L, 4.56D) } } @@ -52,10 
+52,10 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Short] -> "E" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort)) { implicit c => SQL("SELECT * FROM test") - .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E")).map(SqlParser.flatten) single) - .aka("flatten columns") must_=== Tuple5("A", 2, 3L, 4.56d, 9.toShort) + .as((str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E")).map(SqlParser.flatten).single) + .aka("flatten columns") must_=== Tuple5("A", 2, 3L, 4.56D, 9.toShort) } } @@ -70,13 +70,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Byte] -> "F" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) - .aka("flatten columns") must_=== Tuple6("A", 2, 3L, 4.56d, 9.toShort, 10.toByte) + .aka("flatten columns") must_=== Tuple6("A", 2, 3L, 4.56D, 9.toShort, 10.toByte) } } @@ -92,13 +93,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Boolean] -> "G" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) - .aka("flatten columns") must_=== Tuple7("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true) + .aka("flatten columns") must_=== Tuple7("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true) } } @@ -115,14 +117,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[String] -> "H" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B")) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B")) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - )).map(SqlParser.flatten) single + )).map(SqlParser.flatten).single ) - .aka("flatten columns") must_=== Tuple8("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B") + .aka("flatten columns") must_=== Tuple8("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B") } } @@ -140,14 +142,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Int] -> "I" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I")).map(SqlParser.flatten) single + ) ~ int("I")).map(SqlParser.flatten).single ) - .aka("flatten columns") must_=== Tuple9("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3) + .aka("flatten columns") must_=== Tuple9("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3) } } @@ -166,14 +168,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Long] 
-> "J" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I") ~ long("J")).map(SqlParser.flatten) single + ) ~ int("I") ~ long("J")).map(SqlParser.flatten).single ) - .aka("flatten columns") must_=== Tuple10("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L) + .aka("flatten columns") must_=== Tuple10("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L) } } @@ -193,14 +195,14 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Double] -> "K" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d)) { implicit c => + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I") ~ long("J") ~ get[Double]("K")).map(SqlParser.flatten) single + ) ~ int("I") ~ long("J") ~ get[Double]("K")).map(SqlParser.flatten).single ) - .aka("flatten columns") must_=== Tuple11("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d) + .aka("flatten columns") must_=== Tuple11("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D) } } @@ -221,26 +223,26 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { classOf[Short] -> "L" ) - withQueryResult(schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort)) { + withQueryResult(schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort)) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" - ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L")).map(SqlParser.flatten) single + ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple12( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort ) @@ -265,27 +267,28 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte) + schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte) ) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) .aka("flatten columns") must_=== Tuple13( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte ) @@ -312,27 +315,28 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false) + schema.append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte, false) ) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ 
int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) .aka("flatten columns") must_=== Tuple14( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false @@ -361,27 +365,29 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C") + schema + .append("A", 2, 3L, 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67D, 10.toShort, 11.toByte, false, "C") ) { implicit c => SQL("SELECT * FROM test") .as( (str("A") ~ int("B") ~ long("C") ~ get[Double]("D") ~ get[Short]("E") ~ get[Byte]("F") ~ bool("G") ~ str( "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) .aka("flatten columns") must_=== Tuple15( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, @@ -412,7 +418,24 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3 + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -420,20 +443,20 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - )).map(SqlParser.flatten) single + )).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple16( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, @@ -466,7 +489,25 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -474,20 +515,20 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q")).map(SqlParser.flatten) single + ) ~ long("Q")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple17( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, @@ -522,7 +563,26 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -530,27 +590,27 @@ class TupleFlattenerSpec extends 
org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R")).map(SqlParser.flatten) single + ) ~ long("Q") ~ get[Double]("R")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple18( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, "C", 3, 4L, - 5.678d + 5.678D ) } @@ -580,7 +640,27 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -588,27 +668,27 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S")).map(SqlParser.flatten) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple19( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, "C", 3, 4L, - 5.678d, + 5.678D, 16.toShort ) @@ -640,7 +720,28 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D") + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + "D" + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -648,27 +749,27 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T")).map(SqlParser.flatten) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple20( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, "C", 3, 4L, - 5.678d, + 5.678D, 16.toShort, "D" ) @@ -702,7 +803,29 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D", 4) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + "D", + 4 + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -710,27 +833,27 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { "H" ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" - ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ int("U")).map(SqlParser.flatten) single + ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ 
int("U")).map(SqlParser.flatten).single ) .aka("flatten columns") must_=== Tuple21( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, "C", 3, 4L, - 5.678d, + 5.678D, 16.toShort, "D", 4 @@ -766,7 +889,30 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) withQueryResult( - schema :+ ("A", 2, 3L, 4.56d, 9.toShort, 10.toByte, true, "B", 3, 4L, 5.67d, 10.toShort, 11.toByte, false, "C", 3, 4L, 5.678d, 16.toShort, "D", 4, 5L) + schema.append( + "A", + 2, + 3L, + 4.56D, + 9.toShort, + 10.toByte, + true, + "B", + 3, + 4L, + 5.67D, + 10.toShort, + 11.toByte, + false, + "C", + 3, + 4L, + 5.678D, + 16.toShort, + "D", + 4, + 5L + ) ) { implicit c => SQL("SELECT * FROM test") .as( @@ -775,27 +921,28 @@ class TupleFlattenerSpec extends org.specs2.mutable.Specification { ) ~ int("I") ~ long("J") ~ get[Double]("K") ~ get[Short]("L") ~ get[Byte]("M") ~ bool("N") ~ str("O") ~ int( "P" ) ~ long("Q") ~ get[Double]("R") ~ get[Short]("S") ~ str("T") ~ int("U") ~ long("V")) - .map(SqlParser.flatten) single + .map(SqlParser.flatten) + .single ) .aka("flatten columns") must_=== Tuple22( "A", 2, 3L, - 4.56d, + 4.56D, 9.toShort, 10.toByte, true, "B", 3, 4L, - 5.67d, + 5.67D, 10.toShort, 11.toByte, false, "C", 3, 4L, - 5.678d, + 5.678D, 16.toShort, "D", 4, diff --git a/docs/manual/working/scalaGuide/main/sql/AnormEnumeratum.md b/docs/manual/working/scalaGuide/main/sql/AnormEnumeratum.md index b7fc8ef1..21d7bf97 100644 --- a/docs/manual/working/scalaGuide/main/sql/AnormEnumeratum.md +++ b/docs/manual/working/scalaGuide/main/sql/AnormEnumeratum.md @@ -12,6 +12,8 @@ libraryDependencies ++= Seq( ) ``` +> As Enumeratum itself is not yet compatible, this module is not available for Scala 3. + ## Usage Using this module, enums can be parsed from database columns and passed as parameters. 
diff --git a/enumeratum/src/main/scala/values/ValueEnumColumn.scala b/enumeratum/src/main/scala/values/ValueEnumColumn.scala index af2d0764..be284d3e 100644 --- a/enumeratum/src/main/scala/values/ValueEnumColumn.scala +++ b/enumeratum/src/main/scala/values/ValueEnumColumn.scala @@ -6,16 +6,17 @@ import anorm.{ Column, TypeDoesNotMatch } private[values] object ValueEnumColumn { def apply[ValueType, EntryType <: ValueEnumEntry[ValueType]](e: ValueEnum[ValueType, EntryType])(implicit baseColumn: Column[ValueType] - ): Column[EntryType] = Column.nonNull[EntryType] { case (value, meta) => - baseColumn(value, meta) match { - case Left(err) => - Left(err) + ): Column[EntryType] = Column.nonNull[EntryType] { + case (value, meta) => + baseColumn(value, meta) match { + case Left(err) => + Left(err) - case Right(s) => - e.withValueOpt(s) match { - case Some(obj) => Right(obj) - case None => Left(TypeDoesNotMatch(s"Invalid value: $s")) - } - } + case Right(s) => + e.withValueOpt(s) match { + case Some(obj) => Right(obj) + case None => Left(TypeDoesNotMatch(s"Invalid value: $s")) + } + } } } diff --git a/enumeratum/src/test/scala/EnumColumnSpec.scala b/enumeratum/src/test/scala/EnumColumnSpec.scala index 6b95e73f..eea263c5 100644 --- a/enumeratum/src/test/scala/EnumColumnSpec.scala +++ b/enumeratum/src/test/scala/EnumColumnSpec.scala @@ -28,8 +28,9 @@ final class EnumColumnSpec extends org.specs2.mutable.Specification { "not be parsed as Column from invalid String representation" >> { def spec(title: String, repr: String) = title in withQueryResult(RowLists.stringList :+ repr) { implicit con => - SQL("SELECT v").as(scalar[Dummy].single) must throwA[Exception].like { case NonFatal(cause) => - cause must_=== AnormException(TypeDoesNotMatch(s"Invalid value: $repr").message) + SQL("SELECT v").as(scalar[Dummy].single) must throwA[Exception].like { + case NonFatal(cause) => + cause must_=== AnormException(TypeDoesNotMatch(s"Invalid value: $repr").message) } } @@ -41,12 +42,15 @@ final class EnumColumnSpec extends org.specs2.mutable.Specification { "not be parsed as Column from non-String values" >> { def spec(tpe: String, rowList: RowList[_]) = tpe in withQueryResult(rowList) { implicit con => - SQL("SELECT v").as(scalar[Dummy].single) must throwA[Exception].like { case NonFatal(cause) => - cause must_=== AnormException(TypeDoesNotMatch(s"Column '.null' expected to be String; Found $tpe").message) + SQL("SELECT v").as(scalar[Dummy].single) must throwA[Exception].like { + case NonFatal(cause) => + cause must_=== AnormException( + TypeDoesNotMatch(s"Column '.null' expected to be String; Found $tpe").message + ) } } - spec("float", RowLists.floatList :+ 0.1f) + spec("float", RowLists.floatList :+ 0.1F) spec("int", RowLists.intList :+ 1) } } @@ -79,7 +83,7 @@ final class EnumColumnSpec extends org.specs2.mutable.Specification { } } - spec("float", RowLists.floatList :+ 0.1f) + spec("float", RowLists.floatList :+ 0.1F) spec("int", RowLists.intList :+ 1) } } @@ -121,7 +125,7 @@ final class EnumColumnSpec extends org.specs2.mutable.Specification { } } - spec("float", RowLists.floatList :+ 0.1f) + spec("float", RowLists.floatList :+ 0.1F) spec("int", RowLists.intList :+ 1) } } @@ -163,7 +167,7 @@ final class EnumColumnSpec extends org.specs2.mutable.Specification { } } - spec("float", RowLists.floatList :+ 0.1f) + spec("float", RowLists.floatList :+ 0.1F) spec("int", RowLists.intList :+ 1) } } diff --git a/enumeratum/src/test/scala/values/ValueEnumColumnSpec.scala 
b/enumeratum/src/test/scala/values/ValueEnumColumnSpec.scala index a4cba5cb..739b7de6 100644 --- a/enumeratum/src/test/scala/values/ValueEnumColumnSpec.scala +++ b/enumeratum/src/test/scala/values/ValueEnumColumnSpec.scala @@ -28,8 +28,9 @@ final class ValueEnumColumnSpec extends org.specs2.mutable.Specification { "not be parsed as Column from invalid Short representation" in { withQueryResult(RowLists.shortList :+ 0.toShort) { implicit con => - SQL("SELECT v").as(scalar[Drink].single) must throwA[Exception].like { case NonFatal(cause) => - cause mustEqual AnormException(TypeDoesNotMatch(s"Invalid value: 0").message) + SQL("SELECT v").as(scalar[Drink].single) must throwA[Exception].like { + case NonFatal(cause) => + cause mustEqual AnormException(TypeDoesNotMatch(s"Invalid value: 0").message) } } } @@ -40,7 +41,7 @@ final class ValueEnumColumnSpec extends org.specs2.mutable.Specification { SQL("SELECT v").as(scalar[Drink].single) must throwA[AnormException] } - spec("float", RowLists.floatList :+ 0.12f) + spec("float", RowLists.floatList :+ 0.12F) spec("String", RowLists.stringList :+ "foo") } } diff --git a/postgres/src/main/scala/package.scala b/postgres/src/main/scala/package.scala index ef490756..8c2e447c 100644 --- a/postgres/src/main/scala/package.scala +++ b/postgres/src/main/scala/package.scala @@ -18,12 +18,15 @@ sealed trait PGJson { // Could be moved to a separate module /** Allows to pass a `JsValue` as parameter to be stored as `PGobject`. */ - implicit def jsValueToStatement[J <: JsValue] = ToStatement[J] { (s, i, js) => - val pgObject = new PGobject() - pgObject.setType(JsValueParameterMetaData.sqlType) - pgObject.setValue(Json.stringify(js)) - s.setObject(i, pgObject, JsValueParameterMetaData.jdbcType) - } + implicit def jsValueToStatement[J <: JsValue]: ToStatement[J] = + ToStatement[J] { (s, i, js) => + val pgObject = new PGobject() + + pgObject.setType(JsValueParameterMetaData.sqlType) + pgObject.setValue(Json.stringify(js)) + + s.setObject(i, pgObject, JsValueParameterMetaData.jdbcType) + } implicit object JsObjectParameterMetaData extends ParameterMetaData[JsObject] { val sqlType = "JSONB" diff --git a/postgres/src/test/scala/ParameterMetaDataSpec.scala b/postgres/src/test/scala/ParameterMetaDataSpec.scala index 7d3f6af5..0369a43e 100644 --- a/postgres/src/test/scala/ParameterMetaDataSpec.scala +++ b/postgres/src/test/scala/ParameterMetaDataSpec.scala @@ -2,8 +2,8 @@ import play.api.libs.json.{ JsObject, JsValue } import anorm.ParameterMetaData -class ParameterMetaDataSpec extends org.specs2.mutable.Specification { - "Parameter metadata" title +final class ParameterMetaDataSpec extends org.specs2.mutable.Specification { + "Parameter metadata".title import anorm.postgresql._ diff --git a/postgres/src/test/scala/PostgreSQLSpec.scala b/postgres/src/test/scala/PostgreSQLSpec.scala index 647741c9..84bff900 100644 --- a/postgres/src/test/scala/PostgreSQLSpec.scala +++ b/postgres/src/test/scala/PostgreSQLSpec.scala @@ -19,8 +19,8 @@ import org.postgresql.util.PGobject import postgresql._ import AcolyteDSL.{ handleStatement, withQueryResult } -class PostgreSQLSpec extends org.specs2.mutable.Specification { - "PostgreSQL support" title +final class PostgreSQLSpec extends org.specs2.mutable.Specification { + "PostgreSQL support".title import acolyte.jdbc.Implicits._ @@ -38,39 +38,40 @@ class PostgreSQLSpec extends org.specs2.mutable.Specification { } def con = AcolyteDSL.connection(handleStatement.withUpdateHandler { - case UpdateExecution(ExpectedStmt, P(`id`) :: 
DefinedParameter(JsVal, JsDef) :: Nil) => { - + case UpdateExecution(ExpectedStmt, P(`id`) :: DefinedParameter(JsVal, JsDef) :: Nil) => 1 // update count - } + + case ex => + sys.error(s"Unexpected execution: $ex") }) f(con) } "when is an object" in { - withPgo("foo", """{"bar":1}""") { implicit con => + withPgo("foo", """{"bar":1}""") { implicit con: Connection => SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, ${Json.obj("bar" -> 1)})""".executeUpdate() must_=== 1 } } "when is a string" in { - withPgo("foo", "\"bar\"") { implicit con => + withPgo("foo", "\"bar\"") { implicit con: Connection => SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, ${Json.toJson("bar")})""".executeUpdate() must_=== 1 } } "when is a number" in { - withPgo("foo", "3") { implicit con => + withPgo("foo", "3") { implicit con: Connection => SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, ${Json.toJson(3L)})""".executeUpdate() must_=== 1 } } "using JSON writer" >> { - "for pure value" in withPgo("foo", "2") { implicit con => + "for pure value" in withPgo("foo", "2") { implicit con: Connection => SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, ${asJson[TestEnum](Lorem)})""".executeUpdate() must_=== 1 } - "for some optional value" in withPgo("foo", "2") { implicit con => + "for some optional value" in withPgo("foo", "2") { implicit con: Connection => SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, ${asNullableJson[TestEnum](Some(Lorem))})""" .executeUpdate() must_=== 1 } @@ -78,11 +79,13 @@ class PostgreSQLSpec extends org.specs2.mutable.Specification { "for missing optional value" in { val id = "foo" - implicit def con = + implicit def con: Connection = AcolyteDSL.connection(handleStatement.withUpdateHandler { - case UpdateExecution(ExpectedStmt, P(`id`) :: DefinedParameter(null, JsDef) :: Nil) => { + case UpdateExecution(ExpectedStmt, P(`id`) :: DefinedParameter(null, JsDef) :: Nil) => 1 // update count - } + + case ex => + sys.error(s"Unexpected execution: $ex") }) SQL"""INSERT INTO test(id, json) VALUES (${id}, ${asNullableJson[TestEnum](Option.empty[TestEnum])})""" @@ -101,15 +104,15 @@ class PostgreSQLSpec extends org.specs2.mutable.Specification { } val jsVal = Json.obj("bar" -> 1) - "successfully" in withQueryResult(table :+ jsonb) { implicit con => + "successfully" in withQueryResult(table :+ jsonb) { implicit con: Connection => SQL"SELECT json FROM test".as(SqlParser.scalar[JsValue].single) must_=== jsVal } - "successfully as JsObject" in withQueryResult(table :+ jsonb) { implicit con => + "successfully as JsObject" in withQueryResult(table :+ jsonb) { implicit con: Connection => SQL"SELECT json FROM test".as(SqlParser.scalar[JsObject].single) must_=== jsVal } - "successfully using a Reads" in withQueryResult(table :+ jsonb) { implicit con => + "successfully using a Reads" in withQueryResult(table :+ jsonb) { implicit con: Connection => SQL"SELECT json FROM test".as(SqlParser.scalar(fromJson[TestEnum]).single) must_=== Bar } } @@ -117,7 +120,7 @@ class PostgreSQLSpec extends org.specs2.mutable.Specification { "UUID" should { "be passed as PostgreSQL UUID" in { - implicit val con = AcolyteDSL.connection(handleStatement.withUpdateHandler { + implicit val con: Connection = AcolyteDSL.connection(handleStatement.withUpdateHandler { case UpdateExecution( "INSERT INTO test_seq VALUES(?::UUID)", DefinedParameter(uuid: String, ParameterMetaData.Str) :: Nil diff --git a/project/Common.scala b/project/Common.scala index 31cafcb2..e53aaa9b 100644 --- a/project/Common.scala +++ b/project/Common.scala @@ 
-10,42 +10,46 @@ object Common extends AutoPlugin { override def trigger = allRequirements override def requires = JvmPlugin - val previousVersion = "2.6.0" + val previousVersion = "2.6.10" override def projectSettings = Seq( organization := "org.playframework.anorm", scalaVersion := "2.12.16", - crossScalaVersions := Seq("2.11.12", scalaVersion.value, "2.13.8"), + crossScalaVersions := Seq("2.11.12", scalaVersion.value, "2.13.8", "3.1.3"), (Compile / unmanagedSourceDirectories) ++= { val sv = scalaVersion.value - Seq( - scala2Unmanaged(sv, 12, (Compile / sourceDirectory).value), - scala2Unmanaged(sv, 13, (Compile / sourceDirectory).value) - ) + scalaUnmanaged(sv, (Compile / sourceDirectory).value) }, - (Test / unmanagedSourceDirectories) += scala2Unmanaged(scalaVersion.value, 12, (Test / sourceDirectory).value), - libraryDependencies ++= { - val silencerVer = "1.7.9" - - Seq( - compilerPlugin(("com.github.ghik" %% "silencer-plugin" % silencerVer).cross(CrossVersion.full)), - ("com.github.ghik" %% "silencer-lib" % silencerVer % Provided).cross(CrossVersion.full) - ) + (Test / unmanagedSourceDirectories) ++= scalaUnmanaged(scalaVersion.value, (Test / sourceDirectory).value), + ThisBuild / libraryDependencies ++= { + if (scalaBinaryVersion.value != "3") { + val silencerVersion = "1.7.9" + + Seq( + compilerPlugin( + ("com.github.ghik" %% "silencer-plugin" % silencerVersion) + .cross(CrossVersion.full) + ), + ("com.github.ghik" %% "silencer-lib" % silencerVersion % Provided) + .cross(CrossVersion.full) + ) + } else Seq.empty + }, + scalacOptions ++= Seq("-Xfatal-warnings"), + scalacOptions ++= { + if (scalaBinaryVersion.value != "3") { + Seq("-target:jvm-1.8", "-Xlint", "-g:vars") + } else { + Seq.empty + } }, - scalacOptions ++= Seq( - "-encoding", - "UTF-8", - "-target:jvm-1.8", - "-unchecked", - "-deprecation", - "-feature", - "-Xfatal-warnings", - "-Xlint", - "-g:vars" - ), scalacOptions ++= { - if (scalaBinaryVersion.value == "2.12") { + val v = scalaBinaryVersion.value + + if (v == "3") { + Seq("-explaintypes", "-Werror") + } else if (v == "2.12") { Seq( "-Xmax-classfile-name", "128", @@ -57,17 +61,17 @@ object Common extends AutoPlugin { "-Ywarn-unused-import", "-Ywarn-macros:after" ) - } else if (scalaBinaryVersion.value == "2.11") { + } else if (v == "2.11") { Seq("-Xmax-classfile-name", "128", "-Yopt:_", "-Ydead-code", "-Yclosure-elim", "-Yconst-opt") } else { Seq( "-explaintypes", "-Werror", + "-Wunused", "-Wnumeric-widen", "-Wdead-code", "-Wvalue-discard", "-Wextra-implicit", - "-Wunused", "-Wmacros:after" ) } @@ -78,7 +82,10 @@ object Common extends AutoPlugin { Test / console / scalacOptions ~= { _.filterNot { opt => opt.startsWith("-X") || opt.startsWith("-Y") } }, - Test / scalacOptions ++= Seq("-Yrangepos"), + Test / scalacOptions ++= { + if (scalaBinaryVersion.value != "3") Seq("-Yrangepos") + else Seq.empty + }, Test / scalacOptions ~= (_.filterNot(_ == "-Werror")), scalacOptions ~= (_.filterNot(_ == "-Xfatal-warnings")), Test / fork := true, @@ -94,10 +101,22 @@ object Common extends AutoPlugin { @inline def incoRet(n: String) = ProblemFilters.exclude[IncompatibleResultTypeProblem](n) - def scala2Unmanaged(ver: String, minor: Int, base: File): File = + def scalaUnmanaged(ver: String, base: File): Seq[File] = CrossVersion.partialVersion(ver) match { - case Some((2, n)) if n >= minor => base / s"scala-2.${minor}+" - case _ => base / s"scala-2.${minor}-" + case Some((2, 11)) => + Seq(base / "scala-2.12-", base / "scala-2.13-") + + case Some((2, 12)) => + Seq(base / 
"scala-2.12+", base / "scala-2.13-") + + case Some((3, _) | (2, 13)) => + Seq(base / "scala-2.12+", base / "scala-2.13+") + + case Some((_, minor)) => + Seq(base / s"scala-2.${minor}-") + + case _ => + sys.error(s"Unexpected version: $ver") } } diff --git a/project/Scapegoat.scala b/project/Scapegoat.scala deleted file mode 100644 index 3d35291e..00000000 --- a/project/Scapegoat.scala +++ /dev/null @@ -1,42 +0,0 @@ -import sbt.Keys._ -import sbt._ - -import com.sksamuel.scapegoat.sbt.ScapegoatSbtPlugin - -object Scapegoat { - import ScapegoatSbtPlugin.autoImport._ - - val settings = Seq( - ThisBuild / scapegoatVersion := "1.4.15", - ThisBuild / scapegoatReports := Seq("text"), - ThisBuild / scapegoatDisabledInspections := Seq("FinalModifierOnCaseClass"), - pomPostProcess := transformPomDependencies { dep => - if ((dep \ "groupId").text == "com.sksamuel.scapegoat") { - None - } else Some(dep) - } - ) - - import scala.xml.{ Elem => XmlElem, Node => XmlNode } - private def transformPomDependencies(tx: XmlElem => Option[XmlNode]): XmlNode => XmlNode = { node: XmlNode => - import scala.xml.{ NodeSeq, XML } - import scala.xml.transform.{ RewriteRule, RuleTransformer } - - val tr = new RuleTransformer(new RewriteRule { - override def transform(node: XmlNode): NodeSeq = node match { - case e: XmlElem if e.label == "dependency" => - tx(e) match { - case Some(n) => n - case _ => NodeSeq.Empty - } - - case _ => node - } - }) - - tr.transform(node).headOption match { - case Some(transformed) => transformed - case _ => sys.error("Fails to transform the POM") - } - } -} diff --git a/project/plugins.sbt b/project/plugins.sbt index bfeafa98..cf1c0ad3 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,8 +7,6 @@ addSbtPlugin("com.typesafe.play" % "interplay" % sys.props.get("interplay.versio addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") -addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.1") - addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.2.0") diff --git a/tokenizer/src/main/scala/anorm/TokenizedStatement.scala b/tokenizer/src/main/scala-2/anorm/TokenizedStatement.scala similarity index 97% rename from tokenizer/src/main/scala/anorm/TokenizedStatement.scala rename to tokenizer/src/main/scala-2/anorm/TokenizedStatement.scala index 011f189a..612fb870 100644 --- a/tokenizer/src/main/scala/anorm/TokenizedStatement.scala +++ b/tokenizer/src/main/scala-2/anorm/TokenizedStatement.scala @@ -90,8 +90,9 @@ private[anorm] object TokenizedStatement { val groups = (gs match { case TokenGroup(List(StringToken("")), None) :: tgs => tgs // trim end case _ => gs - }).collect { case TokenGroup(pr, pl) => - TokenGroup(pr.reverse, pl) + }).collect { + case TokenGroup(pr, pl) => + TokenGroup(pr.reverse, pl) }.reverse TokenizedStatement(groups, ns.reverse) -> m diff --git a/tokenizer/src/main/scala-3/anorm/TokenizedStatement.scala b/tokenizer/src/main/scala-3/anorm/TokenizedStatement.scala new file mode 100644 index 00000000..9e12dc0d --- /dev/null +++ b/tokenizer/src/main/scala-3/anorm/TokenizedStatement.scala @@ -0,0 +1,112 @@ +package anorm + +/** + * @param tokens the token groups + * @param names the binding names of parsed placeholders + */ +private[anorm] case class TokenizedStatement(tokens: Seq[TokenGroup], names: Seq[String]) + +private[anorm] object TokenizedStatement { + import scala.quoted.{ Expr, FromExpr, Quotes, Type } + + /** Returns empty tokenized statement. 
*/ + lazy val empty = TokenizedStatement(Nil, Nil) + + /** String interpolation to tokenize statement. */ + inline def stringInterpolation[T]( + inline parts: Seq[String], + inline params: Seq[T & Show] + ): (TokenizedStatement, Map[String, T]) = ${ tokenizeImpl[T]('parts, 'params) } + + /** Tokenization macro */ + private def tokenizeImpl[T]( + parts: Expr[Seq[String]], + params: Expr[Seq[T & Show]] + )(using Quotes, Type[T]): Expr[(TokenizedStatement, Map[String, T])] = '{ + val _parts = ${ parts } + val _params = ${ params } + + tokenize(Iterator[String](), Nil, _parts, _params, Nil, Nil, Map.empty[String, T]) + } + + @annotation.tailrec + private[anorm] def tokenize[T]( + ti: Iterator[String], + tks: List[StatementToken], + parts: Seq[String], + ps: Seq[T with Show], + gs: Seq[TokenGroup], + ns: Seq[String], + m: Map[String, T] + ): (TokenizedStatement, Map[String, T]) = if (ti.hasNext) { + tokenize(ti, StringToken(ti.next()) :: tks, parts, ps, gs, ns, m) + } else { + if (tks.nonEmpty) { + gs match { + case prev :: groups => + ps.headOption match { + case Some(v) => + prev match { + case TokenGroup(StringToken(str) :: gts, pl) if str.endsWith("#") /* escaped part */ => + val before = + if (str == "#") gts + else { + StringToken(str.dropRight(1)) :: gts + } + val ng = TokenGroup( + tks ::: StringToken(v.show) :: + before, + pl + ) + + tokenize(ti, tks.tail, parts, ps.tail, ng :: groups, ns, m) + + case _ => + val ng = TokenGroup(tks, None) + val n = '_'.toString + ns.size + tokenize( + ti, + tks.tail, + parts, + ps.tail, + ng :: prev.copy(placeholder = Some(n)) :: groups, + n +: ns, + m + (n -> v) + ) + } + case _ => + sys.error(s"No parameter value for placeholder: ${gs.size}") + } + case _ => tokenize(ti, tks.tail, parts, ps, List(TokenGroup(tks, None)), ns, m) + } + } else + parts.headOption match { + case Some(part) => + val it = List(part).iterator + + if (!it.hasNext /* empty */ ) { + tokenize(it, List(StringToken("")), parts.tail, ps, gs, ns, m) + } else tokenize(it, tks, parts.tail, ps, gs, ns, m) + + case _ => + val groups = (gs match { + case TokenGroup(List(StringToken("")), None) :: tgs => tgs // trim end + case _ => gs + }).collect { + case TokenGroup(pr, pl) => + TokenGroup(pr.reverse, pl) + }.reverse + + TokenizedStatement(groups, ns.reverse) -> m + } + } + + final class TokenizedStatementShow(subject: TokenizedStatement) extends Show { + def show = subject.tokens.map(Show.mkString(_)).mkString + } + + implicit object ShowMaker extends Show.Maker[TokenizedStatement] { + def apply(subject: TokenizedStatement): Show = + new TokenizedStatementShow(subject) + } +}
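Since the Scala 3 tokenizer above is new in this diff, a hand-run of `tokenize` may help review it. A minimal sketch, assuming `Show` is the single-method trait the implementations above suggest (these APIs are `private[anorm]`, hence the package clause; `TokenizeSketch` and `id` are hypothetical names):

```scala
package anorm

private[anorm] object TokenizeSketch {
  // Stand-in parameter value with its Show instance.
  val id = new Show { def show = "3" }

  // Roughly what SQL"SELECT * FROM test WHERE id = $id" feeds in:
  // the interpolation parts plus the interpolated values.
  val (stmt, params): (TokenizedStatement, Map[String, Show]) =
    TokenizedStatement.tokenize(
      Iterator[String](),                        // no pending characters
      Nil,                                       // no accumulated tokens
      Seq("SELECT * FROM test WHERE id = ", ""), // interpolation parts
      Seq(id),                                   // interpolated values
      Nil,
      Nil,
      Map.empty[String, Show]
    )

  // Tracing the recursion above: the trailing empty group is trimmed and
  // the single placeholder is named "_0" ('_'.toString + ns.size), so:
  //
  // stmt == TokenizedStatement(
  //   List(TokenGroup(List(StringToken("SELECT * FROM test WHERE id = ")), Some("_0"))),
  //   List("_0"))
  // params == Map("_0" -> id)
}
```

The `#` escape handled in `tokenize` (a part ending with `#`) instead splices `v.show` directly into the statement text without binding a placeholder, matching the Scala 2 implementation this file ports.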