diff --git a/docs/reference.md b/docs/reference.md
index 37591a41..3e1d00d3 100644
--- a/docs/reference.md
+++ b/docs/reference.md
@@ -8068,23 +8068,83 @@ Expr("Hello").contains("ll")
 Basic operations on all the data types that ScalaSql supports mapping between
 Database types and Scala types
 ### DataTypes.constant
+This example demonstrates a range of different data types being written
+and read back via ScalaSql
+
+```scala
+object MyEnum extends Enumeration {
+  val foo, bar, baz = Value
+
+  implicit def make: String => Value = withName
+}
+case class DataTypes[+T[_]](
+  myTinyInt: T[Byte],
+  mySmallInt: T[Short],
+  myInt: T[Int],
+  myBigInt: T[Long],
+  myDouble: T[Double],
+  myBoolean: T[Boolean],
+  myLocalDate: T[LocalDate],
+  myLocalTime: T[LocalTime],
+  myLocalDateTime: T[LocalDateTime],
+  myInstant: T[Instant],
+  myVarBinary: T[geny.Bytes],
+  myUUID: T[java.util.UUID],
+  myEnum: T[MyEnum.Value]
+)
+
+object DataTypes extends Table[DataTypes]
+
+val value = DataTypes[Id](
+  myTinyInt = 123.toByte,
+  mySmallInt = 12345.toShort,
+  myInt = 12345678,
+  myBigInt = 12345678901L,
+  myDouble = 3.14,
+  myBoolean = true,
+  myLocalDate = LocalDate.parse("2023-12-20"),
+  myLocalTime = LocalTime.parse("10:15:30"),
+  myLocalDateTime = LocalDateTime.parse("2011-12-03T10:15:30"),
+  myInstant = Instant.parse("2011-12-03T10:15:30Z"),
+  myVarBinary = new geny.Bytes(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)),
+  myUUID = new java.util.UUID(1234567890L, 9876543210L),
+  myEnum = MyEnum.bar
+)
+
+db.run(
+  DataTypes.insert.columns(
+    _.myTinyInt := value.myTinyInt,
+    _.mySmallInt := value.mySmallInt,
+    _.myInt := value.myInt,
+    _.myBigInt := value.myBigInt,
+    _.myDouble := value.myDouble,
+    _.myBoolean := value.myBoolean,
+    _.myLocalDate := value.myLocalDate,
+    _.myLocalTime := value.myLocalTime,
+    _.myLocalDateTime := value.myLocalDateTime,
+    _.myInstant := value.myInstant,
+    _.myVarBinary := value.myVarBinary,
+    _.myUUID := value.myUUID,
+    _.myEnum := value.myEnum
+  )
+) ==> 1
+
+db.run(DataTypes.select) ==> Seq(value)
+```
+
+
+
+
+
+### DataTypes.nonRoundTrip
+
 ```scala
-DataTypes.insert.columns(
-  _.myTinyInt := value.myTinyInt,
-  _.mySmallInt := value.mySmallInt,
-  _.myInt := value.myInt,
-  _.myBigInt := value.myBigInt,
-  _.myDouble := value.myDouble,
-  _.myBoolean := value.myBoolean,
-  _.myLocalDate := value.myLocalDate,
-  _.myLocalTime := value.myLocalTime,
-  _.myLocalDateTime := value.myLocalDateTime,
-  _.myInstant := value.myInstant,
-  _.myVarBinary := value.myVarBinary,
-  _.myUUID := value.myUUID,
-  _.myEnum := value.myEnum
+NonRoundTripTypes.insert.columns(
+  _.myOffsetDateTime := value.myOffsetDateTime,
+  _.myZonedDateTime := value.myZonedDateTime
 )
 ```
@@ -8103,7 +8163,7 @@ DataTypes.insert.columns(
 
 
 ```scala
-DataTypes.select
+NonRoundTripTypes.select
 ```
 
@@ -8111,23 +8171,100 @@ DataTypes.select
 *
     ```scala
-    Seq(value)
+    Seq(normalize(value))
     ```
 
 
-### DataTypes.nonRoundTrip
+----
+
+In general, databases do not store timezones and offsets together with their timestamps:
+"TIMESTAMP WITH TIMEZONE" is a lie and it actually stores UTC and renders to whatever
+timezone the client queries it from. Thus values of type `OffsetDateTime` can preserve
+their instant, but cannot be round-tripped preserving the offset.
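+
+For background, here is a minimal standalone sketch (plain `java.time`, independent of
+ScalaSql and of the example below) of why the instant survives such a round trip while
+the offset does not:
+
+```scala
+import java.time.{OffsetDateTime, ZoneOffset}
+
+val original = OffsetDateTime.parse("2011-12-03T10:15:30+01:00")
+// What a database effectively hands back: the same instant, rendered at a different offset
+val roundTripped = original.withOffsetSameInstant(ZoneOffset.UTC)
+
+assert(original.toInstant == roundTripped.toInstant) // the instant is preserved
+assert(original.getOffset != roundTripped.getOffset) // the original "+01:00" offset is lost
+```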
+
+```scala
+case class NonRoundTripTypes[+T[_]](
+  myZonedDateTime: T[ZonedDateTime],
+  myOffsetDateTime: T[OffsetDateTime]
+)
+
+object NonRoundTripTypes extends Table[NonRoundTripTypes]
+
+val value = NonRoundTripTypes[Id](
+  myZonedDateTime = ZonedDateTime.parse("2011-12-03T10:15:30+01:00[Europe/Paris]"),
+  myOffsetDateTime = OffsetDateTime.parse("2011-12-03T10:15:30+00:00")
+)
+
+def normalize(v: NonRoundTripTypes[Id]) = v.copy[Id](
+  myZonedDateTime = v.myZonedDateTime.withZoneSameInstant(ZoneId.systemDefault),
+  myOffsetDateTime = v.myOffsetDateTime.withOffsetSameInstant(OffsetDateTime.now.getOffset)
+)
+
+checker(
+  query = NonRoundTripTypes.insert.columns(
+    _.myOffsetDateTime := value.myOffsetDateTime,
+    _.myZonedDateTime := value.myZonedDateTime
+  ),
+  value = 1
+)
+
+checker(
+  query = NonRoundTripTypes.select,
+  value = Seq(normalize(value)),
+  normalize = (x: Seq[NonRoundTripTypes[Id]]) => x.map(normalize)
+)
+```
+
+
+
+
+
+### DataTypes.enclosing
 
 ```scala
-NonRoundTripTypes.insert.columns(
-  _.myOffsetDateTime := value.myOffsetDateTime,
-  _.myZonedDateTime := value.myZonedDateTime
+Enclosing.insert.columns(
+  _.barId := value1.barId,
+  _.myString := value1.myString,
+  _.foo.fooId := value1.foo.fooId,
+  _.foo.myBoolean := value1.foo.myBoolean
 )
 ```
 
+*
+    ```sql
+    INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+    VALUES (?, ?, ?, ?)
+    ```
+
+
+
+*
+    ```scala
+    1
+    ```
+
+
+
+----
+
+
+
+```scala
+Enclosing.insert.values(value2)
+```
+
+
+*
+    ```sql
+    INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+    VALUES (?, ?, ?, ?)
+    ```
+
 *
@@ -8142,19 +8279,108 @@ NonRoundTripTypes.insert.columns(
 ```scala
-NonRoundTripTypes.select
+Enclosing.select
 ```
 
+*
+    ```sql
+    SELECT
+      enclosing0.bar_id AS res__bar_id,
+      enclosing0.my_string AS res__my_string,
+      enclosing0.foo_id AS res__foo_id,
+      enclosing0.my_boolean AS res__my_boolean
+    FROM enclosing enclosing0
+    ```
+
 *
     ```scala
-    Seq(normalize(value))
+    Seq(value1, value2)
    ```
 
+----
+
+You can nest `case class`es in other `case class`es to DRY up common sets of
+table columns. These nested `case class`es have their columns flattened out
+into the enclosing `case class`'s columns, such that at the SQL level it is
+all flattened out without nesting.
+
+```scala
+// case class Nested[+T[_]](
+//   fooId: T[Int],
+//   myBoolean: T[Boolean],
+// )
+// object Nested extends Table[Nested]
+//
+// case class Enclosing[+T[_]](
+//   barId: T[Int],
+//   myString: T[String],
+//   foo: Nested[T]
+// )
+// object Enclosing extends Table[Enclosing]
+val value1 = Enclosing[Id](
+  barId = 1337,
+  myString = "hello",
+  foo = Nested[Id](
+    fooId = 271828,
+    myBoolean = true
+  )
+)
+val value2 = Enclosing[Id](
+  barId = 31337,
+  myString = "world",
+  foo = Nested[Id](
+    fooId = 1618,
+    myBoolean = false
+  )
+)
+
+checker(
+  query = Enclosing.insert.columns(
+    _.barId := value1.barId,
+    _.myString := value1.myString,
+    _.foo.fooId := value1.foo.fooId,
+    _.foo.myBoolean := value1.foo.myBoolean
+  ),
+  sql = """
+    INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+    VALUES (?, ?, ?, ?)
+  """,
+  value = 1
+)
+
+checker(
+  query = Enclosing.insert.values(value2),
+  sql = """
+    INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+    VALUES (?, ?, ?, ?)
+ """, + value = 1 +) + +checker( + query = Enclosing.select, + sql = """ + SELECT + enclosing0.bar_id AS res__bar_id, + enclosing0.my_string AS res__my_string, + enclosing0.foo_id AS res__foo_id, + enclosing0.my_boolean AS res__my_boolean + FROM enclosing enclosing0 + """, + value = Seq(value1, value2) +) +``` + + + + + + ## Optional Queries using columns that may be `NULL`, `Expr[Option[T]]` or `Option[T]` in Scala ### Optional diff --git a/docs/tutorial.md b/docs/tutorial.md index c0bb85e1..e32ec075 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -98,9 +98,7 @@ case class Country[+T[_]]( code2: T[String] ) -object Country extends Table[Country]() { - initTableMetadata() -} +object Country extends Table[Country]() case class City[+T[_]]( id: T[Int], @@ -110,9 +108,7 @@ case class City[+T[_]]( population: T[Long] ) -object City extends Table[City]() { - initTableMetadata() -} +object City extends Table[City]() case class CountryLanguage[+T[_]]( countryCode: T[String], @@ -121,9 +117,7 @@ case class CountryLanguage[+T[_]]( percentage: T[Double] ) -object CountryLanguage extends Table[CountryLanguage]() { - initTableMetadata() -} +object CountryLanguage extends Table[CountryLanguage]() ``` ### Creating Your Database Client @@ -1290,7 +1284,7 @@ dbClient.transaction { implicit db => ## Custom Expressions You can define custom SQL expressions via the `Expr` constructor. This is -useful for extending ScalaSql when you need to use some operator or syntax +useful for enclosing ScalaSql when you need to use some operator or syntax that your Database supports but ScalaSql does not have built in. This example shows how to define a custom `rawToHex` Scala function working on `Expr[T]`s, that translates down to the H2 database's `RAWTOHEX` SQL function, and finally @@ -1324,30 +1318,36 @@ the database which are of a type not supported by ScalaSql. The example below demonstrates how to define a custom `CityId` type, define an implicit `TypeMapper` for it, and then `INSERT` it into the database and `SELECT` it out after. + + ```scala case class CityId(value: Int) + object CityId { implicit def tm: TypeMapper[CityId] = new TypeMapper[CityId] { def jdbcType: JDBCType = JDBCType.INTEGER + def get(r: ResultSet, idx: Int): CityId = new CityId(r.getInt(idx)) + def put(r: PreparedStatement, idx: Int, v: CityId): Unit = r.setInt(idx, v.value) } } +``` + + +```scala case class City2[+T[_]]( - id: T[CityId], - name: T[String], - countryCode: T[String], - district: T[String], - population: T[Long] -) + id: T[CityId], + name: T[String], + countryCode: T[String], + district: T[String], + population: T[Long] + ) object City2 extends Table[City2]() { - initTableMetadata() override def tableName: String = "city" } -``` -```scala db.run( City2.insert.columns( _.id := CityId(31337), @@ -1366,21 +1366,22 @@ db.run(City2.select.filter(_.id === 31337).single) ==> ScalaSql allows you to customize the table and column names via overriding `def table` and `def tableColumnNameOverride` om your `Table` object. 
+
+
 ```scala
 case class CityCustom[+T[_]](
-  idCustom: T[Int],
-  nameCustom: T[String],
-  countryCodeCustom: T[String],
-  districtCustom: T[String],
-  populationCustom: T[Long]
-)
+    idCustom: T[Int],
+    nameCustom: T[String],
+    countryCodeCustom: T[String],
+    districtCustom: T[String],
+    populationCustom: T[Long]
+  )
 
 object CityCustom extends Table[CityCustom]() {
-  initTableMetadata()
   override def tableName: String = "city"
 
-  override def tableColumnNameOverride(s: String): String = s match{
+  override def tableColumnNameOverride(s: String): String = s match {
     case "idCustom" => "id"
     case "nameCustom" => "name"
     case "countryCodeCustom" => "countrycode"
@@ -1388,9 +1389,8 @@ object CityCustom extends Table[CityCustom]() {
     case "populationCustom" => "population"
   }
 }
-```
 
-```scala
+
 val query = CityCustom.select
 db.toSqlQuery(query) ==> """
   SELECT
diff --git a/scalasql/src-2/utils/TableMacro.scala b/scalasql/src-2/utils/TableMacro.scala
index edabc151..fb2edea2 100644
--- a/scalasql/src-2/utils/TableMacro.scala
+++ b/scalasql/src-2/utils/TableMacro.scala
@@ -14,15 +14,18 @@ object TableMacros {
     val constructor = weakTypeOf[V[Any]].members.find(_.isConstructor).head
     val constructorParameters = constructor.info.paramLists.head
 
-    val columnParams = for (applyParam <- constructorParameters) yield {
-      val name = applyParam.name
-      if (applyParam.name.toString == "foo"){
-        val companion = applyParam.info.typeSymbol.companion
-        q"_root_.scalasql.Table.tableMetadata($companion).vExpr($tableRef, dialect)"
+    def isTypeParamType(param: Symbol) = {
+      param.info.typeSymbol.toString != caseClassType.tpe.typeParams.head.toString
+    }
+
+    val columnParams = for (param <- constructorParameters) yield {
+      val name = param.name
+      if (isTypeParamType(param)){
+        q"implicitly[scalasql.Table.ImplicitMetadata[${param.info.typeSymbol}]].value.vExpr($tableRef, dialect)"
       }else {
         q"""
-        _root_.scalasql.Column[${applyParam.info.typeArgs.head}]()(
+        _root_.scalasql.Column[${param.info.typeArgs.head}]()(
           implicitly,
           sourcecode.Name(
             _root_.scalasql.Table.tableColumnNameOverride(
@@ -35,27 +38,26 @@ object TableMacros {
       }
     }
 
-    def subApplyParam(applyParam: Symbol) = {
+    def subParam(param: Symbol) = {
-      applyParam.info.substituteTypes(
+      param.info.substituteTypes(
        List(constructor.info.resultType.typeArgs.head.typeSymbol),
        List(typeOf[scalasql.Id[_]].asInstanceOf[ExistentialType].underlying.asInstanceOf[TypeRef].sym.info)
       )
     }
 
     val constructParams = for (param <- constructorParameters) yield {
-      val tpe = subApplyParam(param)
+      val tpe = subParam(param)
       q"implicitly[_root_.scalasql.Queryable.Row[_, $tpe]].construct(args): scalasql.Id[$tpe]"
     }
 
     val deconstructParams = for (param <- constructorParameters) yield {
-      val tpe = subApplyParam(param)
+      val tpe = subParam(param)
       q"(v: Any) => implicitly[_root_.scalasql.Queryable.Row[_, $tpe]].deconstruct(v.asInstanceOf[$tpe])"
     }
 
     val flattenLists = for (param <- constructorParameters) yield {
-      if (param.name.toString == "foo") {
-        val companion = param.info.typeSymbol.companion
-        q"_root_.scalasql.Table.tableLabels($companion).map(List(_))"
+      if (isTypeParamType(param)){
+        q"implicitly[scalasql.Table.ImplicitMetadata[${param.info.typeSymbol}]].value.walkLabels0()"
       }else {
         val name = param.name
         q"_root_.scala.List(List(${name.toString}))"
diff --git a/scalasql/src/Table.scala b/scalasql/src/Table.scala
index 842ba90a..c74445f6 100644
--- a/scalasql/src/Table.scala
+++ b/scalasql/src/Table.scala
@@ -36,9 +36,12 @@ abstract class Table[V[_[_]]]()(implicit name: sourcecode.Name, metadata0: Table
   protected[scalasql] def tableLabels: Seq[String] = {
     tableMetadata.walkLabels0().map(_.head)
   }
+  implicit def tableImplicitMetadata: Table.ImplicitMetadata[V] = Table.ImplicitMetadata(tableMetadata)
 }
 
 object Table {
+  case class ImplicitMetadata[V[_[_]]](value: Metadata[V])
+
   def tableMetadata[V[_[_]]](t: Table[V]) = t.tableMetadata
   def tableRef[V[_[_]]](t: Table[V]) = t.tableRef
   def tableName(t: Table.Base) = t.tableName
diff --git a/scalasql/test/resources/h2-customer-schema.sql b/scalasql/test/resources/h2-customer-schema.sql
index 97857ac8..13642506 100644
--- a/scalasql/test/resources/h2-customer-schema.sql
+++ b/scalasql/test/resources/h2-customer-schema.sql
@@ -6,7 +6,7 @@ DROP TABLE IF EXISTS data_types CASCADE;
 DROP TABLE IF EXISTS non_round_trip_types CASCADE;
 DROP TABLE IF EXISTS opt_cols CASCADE;
 DROP TABLE IF EXISTS nested CASCADE;
-DROP TABLE IF EXISTS extending CASCADE;
+DROP TABLE IF EXISTS enclosing CASCADE;
 
 CREATE TABLE buyer (
     id INTEGER AUTO_INCREMENT PRIMARY KEY,
@@ -70,7 +70,7 @@ CREATE TABLE nested(
     my_boolean BOOLEAN
 );
 
-CREATE TABLE extending(
+CREATE TABLE enclosing(
     bar_id INTEGER,
     my_string VARCHAR(256),
     foo_id INTEGER,
diff --git a/scalasql/test/resources/mysql-customer-schema.sql b/scalasql/test/resources/mysql-customer-schema.sql
index e9ba6aba..557900b9 100644
--- a/scalasql/test/resources/mysql-customer-schema.sql
+++ b/scalasql/test/resources/mysql-customer-schema.sql
@@ -7,7 +7,7 @@ DROP TABLE IF EXISTS `data_types` CASCADE;
 DROP TABLE IF EXISTS `non_round_trip_types` CASCADE;
 DROP TABLE IF EXISTS `opt_cols` CASCADE;
 DROP TABLE IF EXISTS `nested` CASCADE;
-DROP TABLE IF EXISTS `extending` CASCADE;
+DROP TABLE IF EXISTS `enclosing` CASCADE;
 SET FOREIGN_KEY_CHECKS = 1;
 
 CREATE TABLE buyer (
@@ -71,7 +71,7 @@ CREATE TABLE nested(
     my_boolean BOOLEAN
 );
 
-CREATE TABLE extending(
+CREATE TABLE enclosing(
    bar_id INTEGER,
    my_string VARCHAR(256),
    foo_id INTEGER,
diff --git a/scalasql/test/resources/postgres-customer-schema.sql b/scalasql/test/resources/postgres-customer-schema.sql
index 44fe3eb7..42169334 100644
--- a/scalasql/test/resources/postgres-customer-schema.sql
+++ b/scalasql/test/resources/postgres-customer-schema.sql
@@ -6,7 +6,7 @@ DROP TABLE IF EXISTS data_types CASCADE;
 DROP TABLE IF EXISTS non_round_trip_types CASCADE;
 DROP TABLE IF EXISTS opt_cols CASCADE;
 DROP TABLE IF EXISTS nested CASCADE;
-DROP TABLE IF EXISTS extending CASCADE;
+DROP TABLE IF EXISTS enclosing CASCADE;
 DROP TYPE IF EXISTS my_enum CASCADE;
 
 CREATE TABLE buyer (
@@ -75,7 +75,7 @@ CREATE TABLE nested(
     my_boolean BOOLEAN
 );
 
-CREATE TABLE extending(
+CREATE TABLE enclosing(
    bar_id INTEGER,
    my_string VARCHAR(256),
    foo_id INTEGER,
diff --git a/scalasql/test/resources/sqlite-customer-schema.sql b/scalasql/test/resources/sqlite-customer-schema.sql
index f3cb175e..2a03071e 100644
--- a/scalasql/test/resources/sqlite-customer-schema.sql
+++ b/scalasql/test/resources/sqlite-customer-schema.sql
@@ -5,7 +5,7 @@ DROP TABLE IF EXISTS purchase;
 DROP TABLE IF EXISTS data_types;
 DROP TABLE IF EXISTS non_round_trip_types;
 DROP TABLE IF EXISTS nested;
-DROP TABLE IF EXISTS extending;
+DROP TABLE IF EXISTS enclosing;
 DROP TABLE IF EXISTS opt_cols;
 
 CREATE TABLE buyer (
@@ -72,7 +72,7 @@ CREATE TABLE nested(
     my_boolean BOOLEAN
 );
 
-CREATE TABLE extending(
+CREATE TABLE enclosing(
    bar_id INTEGER,
    my_string VARCHAR(256),
    foo_id INTEGER,
diff --git a/scalasql/test/src/WorldSqlTests.scala b/scalasql/test/src/WorldSqlTests.scala
index ad41eb58..a9e47362 100644
--- a/scalasql/test/src/WorldSqlTests.scala
+++ b/scalasql/test/src/WorldSqlTests.scala
@@ -1372,7 +1372,7 @@ object WorldSqlTests extends TestSuite {
       // ## Custom Expressions
       //
       // You can define custom SQL expressions via the `Expr` constructor. This is
       // useful for extending ScalaSql when you need to use some operator or syntax
       // that your Database supports but ScalaSql does not have built in. This example
       // shows how to define a custom `rawToHex` Scala function working on `Expr[T]`s,
       // that translates down to the H2 database's `RAWTOHEX` SQL function, and finally
diff --git a/scalasql/test/src/datatypes/DataTypesTests.scala b/scalasql/test/src/datatypes/DataTypesTests.scala
index 753dd57c..01cf5930 100644
--- a/scalasql/test/src/datatypes/DataTypesTests.scala
+++ b/scalasql/test/src/datatypes/DataTypesTests.scala
@@ -16,12 +16,12 @@ case class Nested[+T[_]](
 )
 object Nested extends Table[Nested]
 
-case class Extending[+T[_]](
+case class Enclosing[+T[_]](
    barId: T[Int],
    myString: T[String],
    foo: Nested[T]
 )
-object Extending extends Table[Extending]
+object Enclosing extends Table[Enclosing]
 
 trait DataTypesTests extends ScalaSqlSuite {
@@ -139,12 +139,27 @@ trait DataTypesTests extends ScalaSqlSuite {
       }
     )
 
-    test("extending") - checker.recorded(
+    test("enclosing") - checker.recorded(
       """
-
+      You can nest `case class`es in other `case class`es to DRY up common sets of
+      table columns. These nested `case class`es have their columns flattened out
+      into the enclosing `case class`'s columns, such that at the SQL level it is
+      all flattened out without nesting.
       """,
       Text {
-        val value1 = Extending[Id](
+        // case class Nested[+T[_]](
+        //   fooId: T[Int],
+        //   myBoolean: T[Boolean],
+        // )
+        // object Nested extends Table[Nested]
+        //
+        // case class Enclosing[+T[_]](
+        //   barId: T[Int],
+        //   myString: T[String],
+        //   foo: Nested[T]
+        // )
+        // object Enclosing extends Table[Enclosing]
+        val value1 = Enclosing[Id](
          barId = 1337,
          myString = "hello",
          foo = Nested[Id](
@@ -152,7 +167,7 @@ trait DataTypesTests extends ScalaSqlSuite {
            myBoolean = true
          )
        )
-        val value2 = Extending[Id](
+        val value2 = Enclosing[Id](
          barId = 31337,
          myString = "world",
          foo = Nested[Id](
@@ -162,22 +177,38 @@ trait DataTypesTests extends ScalaSqlSuite {
        )
 
        checker(
-          query = Extending.insert.columns(
+          query = Enclosing.insert.columns(
            _.barId := value1.barId,
            _.myString := value1.myString,
            _.foo.fooId := value1.foo.fooId,
            _.foo.myBoolean := value1.foo.myBoolean,
          ),
+          sql = """
+            INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+            VALUES (?, ?, ?, ?)
+          """,
          value = 1
        )
 
        checker(
-          query = Extending.insert.values(value2),
+          query = Enclosing.insert.values(value2),
+          sql = """
+            INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean)
+            VALUES (?, ?, ?, ?)
+          """,
          value = 1
        )
 
        checker(
-          query = Extending.select,
+          query = Enclosing.select,
+          sql = """
+            SELECT
+              enclosing0.bar_id AS res__bar_id,
+              enclosing0.my_string AS res__my_string,
+              enclosing0.foo_id AS res__foo_id,
+              enclosing0.my_boolean AS res__my_boolean
+            FROM enclosing enclosing0
+          """,
          value = Seq(value1, value2)
        )
      }
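
A note on the `TableMacro.scala` / `Table.scala` changes above: the generated code now summons
nested-table metadata with `implicitly[scalasql.Table.ImplicitMetadata[...]]` instead of
special-casing a parameter named `foo`. This works because every table's companion `object`
extends `Table[V]`, which after this patch exposes an implicit `tableImplicitMetadata`, placing
it in the implicit scope of `ImplicitMetadata[V]`. Below is a simplified, self-contained sketch
of that resolution pattern; `Metadata`, `ImplicitMetadata` and `TableBase` are stand-ins, not
the real ScalaSql definitions.

```scala
// Simplified stand-ins for ScalaSql's Metadata / ImplicitMetadata
case class Metadata[V[_[_]]](labels: Seq[String])
case class ImplicitMetadata[V[_[_]]](value: Metadata[V])

abstract class TableBase[V[_[_]]](labels: Seq[String]) {
  // Inherited by `object Nested` below, so it lives in the implicit scope of
  // ImplicitMetadata[Nested]; `implicitly` can find it without the macro having to
  // special-case any parameter name.
  implicit def tableImplicitMetadata: ImplicitMetadata[V] = ImplicitMetadata(Metadata(labels))
}

case class Nested[+T[_]](fooId: T[Int], myBoolean: T[Boolean])
object Nested extends TableBase[Nested](Seq("foo_id", "my_boolean"))

object ImplicitMetadataDemo extends App {
  // Resolves via the companion object `Nested`, mirroring the macro's generated
  // implicitly[scalasql.Table.ImplicitMetadata[...]] call
  println(implicitly[ImplicitMetadata[Nested]].value.labels) // List(foo_id, my_boolean)
}
```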