readme
lihaoyi committed Nov 22, 2023
1 parent d9cc3e6 commit 89c61dd
Showing 2 changed files with 76 additions and 20 deletions.
33 changes: 23 additions & 10 deletions docs/reference.md
@@ -7034,7 +7034,9 @@ OptCols.insert.batched(_.myInt, _.myInt2)(
### Optional.selectAll
Nullable columns are modelled as `T[Option[V]]` fields on your `case class`,
and are returned to you as `Option[V]` values when you run a query. These
can be `Some` or `None`
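
The `OptCols` table used throughout these examples isn't shown in this excerpt; as a rough sketch (assuming the usual ScalaSQL table pattern and the column names implied by the generated SQL), it would look something like:

```scala
import scalasql._

// Sketch of the OptCols table definition assumed by these examples; the real
// definition lives in the ScalaSQL test sources. Both columns are nullable,
// so they are modelled as T[Option[Int]] rather than T[Int].
case class OptCols[T[_]](myInt: T[Option[Int]], myInt2: T[Option[Int]])

object OptCols extends Table[OptCols]
```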
```scala
OptCols.select
@@ -7065,7 +7067,7 @@ OptCols.select
### Optional.groupByMaxGet
Some aggregates return `Expr[Option[V]]`s, e.g. `.maxByOpt`
```scala
OptCols.select.groupBy(_.myInt)(_.maxByOpt(_.myInt2.get))
@@ -7090,7 +7092,8 @@ OptCols.select.groupBy(_.myInt)(_.maxByOpt(_.myInt2.get))
### Optional.isDefined
`.isDefined` on `Expr[Option[V]]` translates to a SQL
`IS NOT NULL` check
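
Since `.isDefined` produces a plain `Expr[Boolean]`, it should also be usable outside of `.filter`, for example selected as a column in its own right (a sketch, not taken from this diff):

```scala
// Sketch: select the IS NOT NULL check itself, yielding one Boolean per row
// indicating whether my_int is non-NULL.
OptCols.select.map(d => d.myInt.isDefined)
```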
```scala
OptCols.select.filter(_.myInt.isDefined)
@@ -7117,7 +7120,8 @@ OptCols.select.filter(_.myInt.isDefined)
### Optional.isEmpty
`.isEmpty` on `Expr[Option[V]]` translates to a SQL
`IS NULL` check
```scala
OptCols.select.filter(_.myInt.isEmpty)
@@ -7144,7 +7148,9 @@ OptCols.select.filter(_.myInt.isEmpty)
### Optional.sqlEquals.nonOptionHit
Backticked `=` equality in ScalaSQL translates to a raw `=`
in SQL. This follows SQL `NULL` semantics, meaning that
`None = None` returns `false` rather than `true`
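
For example, filtering against `Option.empty` with the backticked operator matches nothing at all under those semantics (a sketch of the miss case, which the examples further down cover in full):

```scala
// Sketch: this presumably compiles to `WHERE (opt_cols0.my_int = ?)` with a
// NULL parameter. Under SQL NULL semantics that predicate is never true, so
// no rows are returned -- not even the ones where my_int is NULL.
OptCols.select.filter(_.myInt `=` Option.empty[Int])
```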
```scala
OptCols.select.filter(_.myInt `=` 1)
@@ -7225,7 +7231,9 @@ OptCols.select.filter(_.myInt `=` Option.empty[Int])
### Optional.scalaEquals.someHit
`===` equality in ScalaSQL translates to an `IS NOT DISTINCT FROM` comparison in SQL.
This roughly follows Scala `==` semantics, meaning `None === None`
returns `true`
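
Conversely, comparing against `Option.empty` with `===` does match the rows whose column is `NULL`, since two `NULL`s are "not distinct" from each other (a sketch along the lines of the `noneHit` test):

```scala
// Sketch: with ===, a None comparand matches the rows where my_int is NULL,
// because NULL IS NOT DISTINCT FROM NULL evaluates to true.
OptCols.select.filter(_.myInt === Option.empty[Int])
```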
```scala
OptCols.select.filter(_.myInt === Option(1))
@@ -7340,7 +7348,9 @@ OptCols.select.filter(_.myInt !== Option.empty[Int])
### Optional.map
You can use operators like `.map` and `.flatMap` to work with
your `Expr[Option[V]]` values. These roughly follow the semantics
that you would be familiar with from Scala.
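
The example below uses `.map`; a `.flatMap` version combining both optional columns might look roughly like this (a sketch in the spirit of the `map2` test, not copied verbatim from it):

```scala
// Sketch: .flatMap combines two Expr[Option[Int]] values; the result is
// None/NULL whenever either my_int or my_int2 is NULL.
OptCols.select.map { d =>
  d.copy[Expr](myInt = d.myInt.flatMap(v => d.myInt2.map(v2 => v + v2 + 10)))
}
```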
```scala
OptCols.select.map(d => d.copy[Expr](myInt = d.myInt.map(_ + 10)))
@@ -7427,7 +7437,10 @@ OptCols.select
### Optional.mapGet
You can use `.get` to turn an `Expr[Option[V]]` into an `Expr[V]`. This follows
SQL semantics, such that `NULL`s anywhere in that selected column will
automatically turn the whole column `None` (if it's an `Expr[Option[V]]` column)
or `null` (if it's not an optional column)
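
If you would rather substitute a default than let the `NULL` propagate, ScalaSQL also appears to offer a Scala-style `.getOrElse` on `Expr[Option[V]]` (hedged: the method and its exact signature aren't shown in this diff):

```scala
// Hedged sketch, assuming an Option-like .getOrElse exists on Expr[Option[V]]:
// substitute -1 wherever my_int is NULL instead of propagating the NULL.
OptCols.select.map(d => d.myInt.getOrElse(-1))
```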
```scala
OptCols.select.map(d => d.copy[Expr](myInt = d.myInt.map(_ + d.myInt2.get + 1)))
@@ -7553,7 +7566,7 @@ OptCols.select.map(d => d.copy[Expr](myInt = d.myInt.orElse(d.myInt2)))
### Optional.filter
`.filter` follows normal Scala semantics and translates to a `CASE`/`WHEN (foo)`/`ELSE NULL` expression
```scala
OptCols.select.map(d => d.copy[Expr](myInt = d.myInt.filter(_ < 2)))
@@ -7587,7 +7600,7 @@ OptCols.select.map(d => d.copy[Expr](myInt = d.myInt.filter(_ < 2)))
### Optional.sorting.nullsLast
`.nullsLast` and `.nullsFirst` translate to SQL `NULLS LAST` and `NULLS FIRST` clauses
```scala
OptCols.select.sortBy(_.myInt).nullsLast
63 changes: 53 additions & 10 deletions scalasql/test/src/datatypes/OptionalTests.scala
@@ -43,7 +43,12 @@ trait OptionalTests extends ScalaSqlSuite {
OptCols[Id](Some(1), Some(2)),
OptCols[Id](Some(3), None),
OptCols[Id](None, Some(4))
)
),
docs = """
Nullable columns are modelled as `T[Option[V]]` fields on your `case class`,
and are returned to you as `Option[V]` values when you run a query. These
can be `Some` or `None`
"""
)

test("groupByMaxGet") - checker(
@@ -54,7 +59,10 @@ trait OptionalTests extends ScalaSqlSuite {
GROUP BY opt_cols0.my_int
""",
value = Seq(None -> Some(4), Some(1) -> Some(2), Some(3) -> None),
normalize = (x: Seq[(Option[Int], Option[Int])]) => x.sorted
normalize = (x: Seq[(Option[Int], Option[Int])]) => x.sorted,
docs = """
Some aggregates return `Expr[Option[V]]`s, e.g. `.maxByOpt`
"""
)

test("isDefined") - checker(
@@ -65,7 +73,11 @@ trait OptionalTests extends ScalaSqlSuite {
opt_cols0.my_int2 AS res__my_int2
FROM opt_cols opt_cols0
WHERE (opt_cols0.my_int IS NOT NULL)""",
value = Seq(OptCols[Id](Some(1), Some(2)), OptCols[Id](Some(3), None))
value = Seq(OptCols[Id](Some(1), Some(2)), OptCols[Id](Some(3), None)),
docs = """
`.isDefined` on `Expr[Option[V]]` translates to a SQL
`IS NOT NULL` check
"""
)

test("isEmpty") - checker(
@@ -76,7 +88,11 @@ trait OptionalTests extends ScalaSqlSuite {
opt_cols0.my_int2 AS res__my_int2
FROM opt_cols opt_cols0
WHERE (opt_cols0.my_int IS NULL)""",
value = Seq(OptCols[Id](None, None), OptCols[Id](None, Some(4)))
value = Seq(OptCols[Id](None, None), OptCols[Id](None, Some(4))),
docs ="""
`.isEmpty` on `Expr[Option[V]]` translates to a SQL
`IS NULL` check
"""
)

test("sqlEquals") {
@@ -89,7 +105,12 @@ trait OptionalTests extends ScalaSqlSuite {
FROM opt_cols opt_cols0
WHERE (opt_cols0.my_int = ?)
""",
value = Seq(OptCols[Id](Some(1), Some(2)))
value = Seq(OptCols[Id](Some(1), Some(2))),
docs = """
Backticked `=` equality in ScalaSQL translates to a raw `=`
in SQL. This follows SQL `NULL` semantics, meaning that
`None = None` returns `false` rather than `true`
"""
)

test("nonOptionMiss") - checker(
@@ -136,7 +157,12 @@ trait OptionalTests extends ScalaSqlSuite {
WHERE (opt_cols0.my_int <=> ?)
"""
),
value = Seq(OptCols[Id](Some(1), Some(2)))
value = Seq(OptCols[Id](Some(1), Some(2))),
docs = """
`===` equality in ScalaSQL translates to an `IS NOT DISTINCT FROM` comparison in SQL.
This roughly follows Scala `==` semantics, meaning `None === None`
returns `true`
"""
)

test("noneHit") - checker(
@@ -231,7 +257,12 @@ trait OptionalTests extends ScalaSqlSuite {
OptCols[Id](Some(11), Some(2)),
OptCols[Id](Some(13), None),
OptCols[Id](None, Some(4))
)
),
docs = """
You can use operators like `.map` and `.flatMap` to work with
your `Expr[Option[V]]` values. These roughly follow the semantics
that you would be familiar with from Scala.
"""
)

test("map2") - checker(
@@ -276,7 +307,13 @@ trait OptionalTests extends ScalaSqlSuite {
// because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too
OptCols[Id](None, None),
OptCols[Id](None, Some(4))
)
),
docs = """
You can use `.get` to turn an `Expr[Option[V]]` into an `Expr[V]`. This follows
SQL semantics, such that `NULL`s anywhere in that selected column will
automatically turn the whole column `None` (if it's an `Expr[Option[V]]` column)
or `null` (if it's not an optional column)
"""
)

test("rawGet") - checker(
@@ -346,7 +383,10 @@ trait OptionalTests extends ScalaSqlSuite {
OptCols[Id](Some(1), Some(2)),
OptCols[Id](None, None),
OptCols[Id](None, Some(4))
)
),
docs = """
`.filter` follows normal Scala semantics and translates to a `CASE`/`WHEN (foo)`/`ELSE NULL` expression
"""
)
test("sorting") {
test("nullsLast") - checker(
@@ -368,7 +408,10 @@ trait OptionalTests extends ScalaSqlSuite {
OptCols[Id](Some(3), None),
OptCols[Id](None, None),
OptCols[Id](None, Some(4))
)
),
docs = """
`.nullsLast` and `.nullsFirst` translate to SQL `NULLS LAST` and `NULLS FIRST` clauses
"""
)
test("nullsFirst") - checker(
query = Text { OptCols.select.sortBy(_.myInt).nullsFirst },
