diff --git a/.mill-version b/.mill-version index ac454c6a..54dbed47 100644 --- a/.mill-version +++ b/.mill-version @@ -1 +1 @@ -0.12.0 +0.12.10 diff --git a/.scalafmt.conf b/.scalafmt.conf index 59dd68bb..2b151fef 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version = "3.8.1" +version = "3.9.5" align.preset = none align.openParenCallSite = false diff --git a/build.mill b/build.mill index 399f4dd3..99569107 100644 --- a/build.mill +++ b/build.mill @@ -8,10 +8,11 @@ import de.tobiasroeser.mill.vcs.version.VcsVersion import com.goyeau.mill.scalafix.ScalafixModule import mill._, scalalib._, publish._ -val scalaVersions = Seq("2.13.12", "3.6.2") +val scala3 = "3.6.2" +val scalaVersions = Seq("2.13.12", scala3) +val scala3NamedTuples = "3.7.0" -trait Common extends CrossScalaModule with PublishModule with ScalafixModule{ - def scalaVersion = crossScalaVersion +trait CommonBase extends ScalaModule with PublishModule with ScalafixModule { common => def publishVersion = VcsVersion.vcsState().format() @@ -33,18 +34,13 @@ trait Common extends CrossScalaModule with PublishModule with ScalafixModule{ Seq("-Wunused:privates,locals,explicits,implicits,params") ++ Option.when(scalaVersion().startsWith("2."))("-Xsource:3") } -} - - -object scalasql extends Cross[ScalaSql](scalaVersions) -trait ScalaSql extends Common{ common => - def moduleDeps = Seq(query, operations) - def ivyDeps = Agg.empty[Dep] ++ Option.when(scalaVersion().startsWith("2."))( - ivy"org.scala-lang:scala-reflect:${scalaVersion()}" - ) + def semanticDbVersion: T[String] = + // last version that works with Scala 2.13.12 + "4.12.3" - object test extends ScalaTests with ScalafixModule{ + trait CommonTest extends ScalaTests with ScalafixModule { + def semanticDbVersion: T[String] = common.semanticDbVersion def scalacOptions = common.scalacOptions def ivyDeps = Agg( ivy"com.github.vertical-blank:sql-formatter:2.0.4", @@ -61,10 +57,51 @@ trait ScalaSql extends Common{ common => 
ivy"com.zaxxer:HikariCP:5.1.0" ) + def recordedTestsFile: String + def recordedSuiteDescriptionsFile: String + def testFramework = "scalasql.UtestFramework" def forkArgs = Seq("-Duser.timezone=Asia/Singapore") - def forkEnv = Map("MILL_WORKSPACE_ROOT" -> T.workspace.toString()) + + def forkEnv = Map( + "MILL_WORKSPACE_ROOT" -> T.workspace.toString(), + "SCALASQL_RECORDED_TESTS_NAME" -> recordedTestsFile, + "SCALASQL_RECORDED_SUITE_DESCRIPTIONS_NAME" -> recordedSuiteDescriptionsFile + ) + } +} +trait Common extends CommonBase with CrossScalaModule + +object `scalasql-namedtuples` extends CommonBase { + def scalaVersion: T[String] = scala3NamedTuples + def millSourcePath: os.Path = scalasql(scala3).millSourcePath / "namedtuples" + def moduleDeps: Seq[PublishModule] = Seq(scalasql(scala3)) + + // override def scalacOptions: Target[Seq[String]] = T { + // super.scalacOptions() :+ "-Xprint:inlining" + // } + + object test extends CommonTest { + def resources = scalasql(scala3).test.resources + def moduleDeps = super.moduleDeps ++ Seq(scalasql(scala3), scalasql(scala3).test) + def recordedTestsFile: String = "recordedTestsNT.json" + def recordedSuiteDescriptionsFile: String = "recordedSuiteDescriptionsNT.json" + } +} + +object scalasql extends Cross[ScalaSql](scalaVersions) +trait ScalaSql extends Common { common => + def moduleDeps = Seq(query, operations) + def ivyDeps = Agg.empty[Dep] ++ Option.when(scalaVersion().startsWith("2."))( + ivy"org.scala-lang:scala-reflect:${scalaVersion()}" + ) + + override def consoleScalacOptions: T[Seq[String]] = Seq("-Xprint:typer") + + object test extends CommonTest { + def recordedTestsFile: String = "recordedTests.json" + def recordedSuiteDescriptionsFile: String = "recordedSuiteDescriptions.json" } private def indent(code: Iterable[String]): String = @@ -74,7 +111,7 @@ trait ScalaSql extends Common{ common => def ivyDeps = Agg( ivy"com.lihaoyi::geny:1.0.0", ivy"com.lihaoyi::sourcecode:0.3.1", - ivy"com.lihaoyi::pprint:0.8.1", + 
ivy"com.lihaoyi::pprint:0.8.1" ) ++ Option.when(scalaVersion().startsWith("2."))( ivy"org.scala-lang:scala-reflect:${scalaVersion()}" ) @@ -82,7 +119,6 @@ trait ScalaSql extends Common{ common => def generatedSources = T { def commaSep0(i: Int, f: Int => String) = Range.inclusive(1, i).map(f).mkString(", ") - val queryableRowDefs = for (i <- Range.inclusive(2, 22)) yield { def commaSep(f: Int => String) = commaSep0(i, f) s"""implicit def Tuple${i}Queryable[${commaSep(j => s"Q$j")}, ${commaSep(j => s"R$j")}]( @@ -98,7 +134,6 @@ trait ScalaSql extends Common{ common => |}""".stripMargin } - os.write( T.dest / "Generated.scala", s"""package scalasql.core.generated @@ -113,15 +148,13 @@ trait ScalaSql extends Common{ common => } - - object operations extends Common with CrossValue{ + object operations extends Common with CrossValue { def moduleDeps = Seq(core) } - object query extends Common with CrossValue{ + object query extends Common with CrossValue { def moduleDeps = Seq(core) - def generatedSources = T { def commaSep0(i: Int, f: Int => String) = Range.inclusive(1, i).map(f).mkString(", ") @@ -139,7 +172,9 @@ trait ScalaSql extends Common{ common => | ) | |""".stripMargin - s"""def batched[${commaSep(j => s"T$j")}](${commaSep(j => s"f$j: V[Column] => Column[T$j]")})( + s"""def batched[${commaSep(j => s"T$j")}](${commaSep(j => + s"f$j: V[Column] => Column[T$j]" + )})( | items: (${commaSep(j => s"Expr[T$j]")})* |)(implicit qr: Queryable[V[Column], R]): scalasql.query.InsertColumns[V, R] $impl""".stripMargin } @@ -165,12 +200,15 @@ trait ScalaSql extends Common{ common => val commaSepQ = commaSep(j => s"Q$j") val commaSepR = commaSep(j => s"R$j") - val joinAppendType = s"scalasql.query.JoinAppend[($commaSepQ), QA, ($commaSepQ, QA), ($commaSepR, RA)]" + val joinAppendType = + s"scalasql.query.JoinAppend[($commaSepQ), QA, ($commaSepQ, QA), ($commaSepR, RA)]" s""" |implicit def append$i[$commaSepQ, QA, $commaSepR, RA]( | implicit qr0: Queryable.Row[($commaSepQ, QA), 
($commaSepR, RA)], | @annotation.nowarn("msg=never used") qr20: Queryable.Row[QA, RA]): $joinAppendType = new $joinAppendType { - | override def appendTuple(t: ($commaSepQ), v: QA): ($commaSepQ, QA) = (${commaSep(j => s"t._$j")}, v) + | override def appendTuple(t: ($commaSepQ), v: QA): ($commaSepQ, QA) = (${commaSep(j => + s"t._$j" + )}, v) | | def qr: Queryable.Row[($commaSepQ, QA), ($commaSepR, RA)] = qr0 |}""".stripMargin diff --git a/docs/generateDocs.mill b/docs/generateDocs.mill index e9f73891..4f1b771a 100644 --- a/docs/generateDocs.mill +++ b/docs/generateDocs.mill @@ -5,6 +5,7 @@ def generateTutorial(sourcePath: os.Path, destPath: os.Path) = { var isDocs = Option.empty[Int] var isCode = false val outputLines = collection.mutable.Buffer.empty[String] + val snippets = collection.mutable.HashMap.empty[String, scala.collection.BufferedIterator[String]] outputLines.append(generatedCodeHeader) for (line <- os.read.lines(sourcePath)) { val isDocsIndex = line.indexOf("// +DOCS") @@ -25,6 +26,24 @@ def generateTutorial(sourcePath: os.Path, destPath: os.Path) = { (suffix, isCode) match{ case ("", _) => outputLines.append("") + case (s"// +INCLUDE SNIPPET [$key] $rest", _) => + // reuse the iterator each time, + // basically assume snippets are requested in order. 
+ val sublines: scala.collection.BufferedIterator[String] = snippets.getOrElseUpdate(rest, os.read.lines(mill.api.WorkspaceRoot.workspaceRoot / os.SubPath(rest)).iterator.buffered) + val start = s"// +SNIPPET [$key]" + val end = s"// -SNIPPET [$key]" + while (sublines.hasNext && !sublines.head.contains(start)) { + sublines.next() // drop lines until we find the start + } + val indent = sublines.headOption.map(_.indexOf(start)).getOrElse(-1) + if (indent != -1) { + sublines.next() // skip the start line + while (sublines.hasNext && !sublines.head.contains(end)) { + outputLines.append(sublines.next().drop(indent)) + } + } else { + outputLines.append("") + } case (s"// +INCLUDE $rest", _) => os.read.lines(mill.api.WorkspaceRoot.workspaceRoot / os.SubPath(rest)).foreach(outputLines.append) @@ -50,11 +69,14 @@ def generateTutorial(sourcePath: os.Path, destPath: os.Path) = { } def generateReference(dest: os.Path, scalafmtCallback: (Seq[os.Path], os.Path) => Unit) = { def dropExprPrefix(s: String) = s.split('.').drop(2).mkString(".") + def dropNTExprPrefix(s: String) = s.split('.').drop(3).mkString(".") val records = upickle.default.read[Seq[Record]](os.read.stream(mill.api.WorkspaceRoot.workspaceRoot / "out" / "recordedTests.json")) + val ntRecords = upickle.default.read[Seq[Record]](os.read.stream(mill.api.WorkspaceRoot.workspaceRoot / "out" / "recordedTestsNT.json")) val suiteDescriptions = upickle.default.read[Map[String, String]](os.read.stream(mill.api.WorkspaceRoot.workspaceRoot / "out" / "recordedSuiteDescriptions.json")) .map{case (k, v) => (dropExprPrefix(k), v)} - val rawScalaStrs = records.flatMap(r => Seq(r.queryCodeString) ++ r.resultCodeString) + val rawScalaStrs = (records ++ ntRecords) + .flatMap(r => Seq(r.queryCodeString) ++ r.resultCodeString) val formattedScalaStrs = { val tmps = rawScalaStrs.map(os.temp(_, suffix = ".scala")) scalafmtCallback(tmps, mill.api.WorkspaceRoot.workspaceRoot / ".scalafmt.conf") @@ -124,6 +146,10 @@ def 
generateReference(dest: os.Path, scalafmtCallback: (Seq[os.Path], os.Path) = |databases, due to differences in how each database parses SQL. These differences |are typically minor, and as long as you use the right `Dialect` for your database |ScalaSql should do the right thing for you. + | + |>**A note for users of `SimpleTable`**: The examples in this document assume usage of + |>`Table`, with a higher kinded type parameter on a case class. If you are using + |>`SimpleTable`, then the same code snippets should work by dropping `[Sc]`. |""".stripMargin ) val recordsWithoutDuplicateSuites = records @@ -132,15 +158,26 @@ def generateReference(dest: os.Path, scalafmtCallback: (Seq[os.Path], os.Path) = .sortBy(_._2.head.suiteLine) .distinctBy { case (k, v) => dropExprPrefix(k)} .map{case (k, vs) => (dropExprPrefix(k), vs.map(r => r.copy(suiteName = dropExprPrefix(r.suiteName))))} + val ntRecordsWithoutDuplicateSuites = ntRecords + .groupBy(_.suiteName) + .toSeq + .sortBy(_._2.head.suiteLine) + .distinctBy { case (k, v) => dropNTExprPrefix(k)} + .map{case (k, vs) => (dropNTExprPrefix(k), vs.map(r => r.copy(suiteName = dropNTExprPrefix(r.suiteName))))} + .toMap for((suiteName, suiteGroup) <- recordsWithoutDuplicateSuites) { val seen = mutable.Set.empty[String] outputLines.append(s"## $suiteName") outputLines.append(suiteDescriptions(suiteName)) var lastSeen = "" - for(r <- suiteGroup){ - - val prettyName = (r.suiteName +: r.testPath).mkString(".") + var remainingNTRecords = ntRecordsWithoutDuplicateSuites + .get(suiteName) + .getOrElse(Seq.empty).groupBy {r => + val prettyName = (r.suiteName +: r.testPath).mkString(".") + prettyName + } + def addRecord(r: Record, prettyName: String) = { val titleOpt = if (prettyName == lastSeen) Some("----") else if (!seen(prettyName)) Some(s"### $prettyName") @@ -151,21 +188,29 @@ def generateReference(dest: os.Path, scalafmtCallback: (Seq[os.Path], os.Path) = lastSeen = prettyName outputLines.append( s"""$title - | - |${dedent(r.docs, 
"")} - | - |```scala - |${scalafmt(r.queryCodeString)} - |``` - | - |${sqlFormat(r.sqlString)} - | - |${renderResult(r.resultCodeString)} - | - |""".stripMargin + | + |${dedent(r.docs, "")} + | + |```scala + |${scalafmt(r.queryCodeString)} + |``` + | + |${sqlFormat(r.sqlString)} + | + |${renderResult(r.resultCodeString)} + | + |""".stripMargin ) } } + for(r <- suiteGroup){ + val prettyName = (r.suiteName +: r.testPath).mkString(".") + addRecord(r, prettyName) + remainingNTRecords -= prettyName + } + for((prettyName, rs) <- remainingNTRecords; r <- rs) { + addRecord(r, prettyName) + } } os.write.over(dest, outputLines.mkString("\n")) } diff --git a/docs/reference.md b/docs/reference.md index 7220e9b3..51f024b8 100644 --- a/docs/reference.md +++ b/docs/reference.md @@ -16,6 +16,10 @@ databases, due to differences in how each database parses SQL. These differences are typically minor, and as long as you use the right `Dialect` for your database ScalaSql should do the right thing for you. +>**A note for users of `SimpleTable`**: The examples in this document assume usage of +>`Table`, with a higher kinded type parameter on a case class. If you are using +>`SimpleTable`, then the same code snippets should work by dropping `[Sc]`. + ## DbApi Basic usage of `db.*` operations such as `db.run` ### DbApi.renderSql @@ -416,7 +420,9 @@ try { throw new FooException } -} catch { case e: FooException => /*donothing*/ } +} catch { + case e: FooException => /*donothing*/ +} dbClient.transaction(_.run(Purchase.select.size)) ==> 7 ``` @@ -10280,6 +10286,80 @@ result._2 ==> None +### DataTypes.enclosing - with SimpleTable + +You can nest `case class`es in other `case class`es to DRY up common sets of +table columns. These nested `case class`es have their columns flattened out +into the enclosing `case class`'s columns, such that at the SQL level it is +all flattened out without nesting. 
+ +**Important**: When using nested `case class`es with `SimpleTable`, +make sure to extend `SimpleTable.Nested` in the nested class. + +```scala +// case class Nested( +// fooId: Int, +// myBoolean: Boolean, +// ) extends SimpleTable.Nested +// object Nested extends SimpleTable[Nested] +// +// case class Enclosing( +// barId: Int, +// myString: String, +// foo: Nested +// ) +// object Enclosing extends SimpleTable[Enclosing] +val value1 = Enclosing( + barId = 1337, + myString = "hello", + foo = Nested( + fooId = 271828, + myBoolean = true + ) +) +val value2 = Enclosing( + barId = 31337, + myString = "world", + foo = Nested( + fooId = 1618, + myBoolean = false + ) +) + +val insertColumns = Enclosing.insert.columns( + _.barId := value1.barId, + _.myString := value1.myString, + _.foo.fooId := value1.foo.fooId, + _.foo.myBoolean := value1.foo.myBoolean +) +db.renderSql(insertColumns) ==> + "INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean) VALUES (?, ?, ?, ?)" + +db.run(insertColumns) ==> 1 + +val insertValues = Enclosing.insert.values(value2) +db.renderSql(insertValues) ==> + "INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean) VALUES (?, ?, ?, ?)" + +db.run(insertValues) ==> 1 + +db.renderSql(Enclosing.select) ==> """ + SELECT + enclosing0.bar_id AS bar_id, + enclosing0.my_string AS my_string, + enclosing0.foo_id AS foo_id, + enclosing0.my_boolean AS my_boolean + FROM enclosing enclosing0 + """ + +db.run(Enclosing.select) ==> Seq(value1, value2) +``` + + + + + + ## Optional Queries using columns that may be `NULL`, `Expr[Option[T]]` or `Option[T]` in Scala ### Optional @@ -11131,6 +11211,200 @@ db.run(OptDataTypes.select) ==> Seq(rowSome, rowNone) +### Optional.filter - with SimpleTable + +`.filter` follows normal Scala semantics, and translates to a `CASE`/`WHEN (foo)`/`ELSE NULL` + +```scala +OptCols.select.map(d => d.updates(_.myInt(_.filter(_ < 2)))) +``` + + +* + ```sql + SELECT + CASE + WHEN (opt_cols0.my_int < ?) 
THEN opt_cols0.my_int + ELSE NULL + END AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(None, None), + OptCols(Some(1), Some(2)), + OptCols(None, None), + OptCols(None, Some(4)) + ) + ``` + + + +### Optional.getOrElse - with SimpleTable + + + +```scala +OptCols.select.map(d => d.updates(_.myInt(_.getOrElse(-1)))) +``` + + +* + ```sql + SELECT + COALESCE(opt_cols0.my_int, ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(Some(-1), None), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(Some(-1), Some(4)) + ) + ``` + + + +### Optional.rawGet - with SimpleTable + + + +```scala +OptCols.select.map(d => d.updates(_.myInt := d.myInt.get + d.myInt2.get + 1)) +``` + + +* + ```sql + SELECT + ((opt_cols0.my_int + opt_cols0.my_int2) + ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(None, None), + OptCols(Some(4), Some(2)), + // because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too + OptCols(None, None), + OptCols(None, Some(4)) + ) + ``` + + + +### Optional.orElse - with SimpleTable + + + +```scala +OptCols.select.map(d => d.updates(_.myInt(_.orElse(d.myInt2)))) +``` + + +* + ```sql + SELECT + COALESCE(opt_cols0.my_int, opt_cols0.my_int2) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(None, None), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(Some(4), Some(4)) + ) + ``` + + + +### Optional.flatMap - with SimpleTable + + + +```scala +OptCols.select + .map(d => d.updates(_.myInt(_.flatMap(v => d.myInt2.map(v2 => v + v2 + 10))))) +``` + + +* + ```sql + SELECT + ((opt_cols0.my_int + opt_cols0.my_int2) + ?) 
AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(None, None), + OptCols(Some(13), Some(2)), + // because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too + OptCols(None, None), + OptCols(None, Some(4)) + ) + ``` + + + +### Optional.map - with SimpleTable + +You can use operators like `.map` and `.flatMap` to work with +your `Expr[Option[V]]` values. These roughly follow the semantics +that you would be familiar with from Scala. + +```scala +OptCols.select.map(d => d.updates(_.myInt(_.map(_ + 10)))) +``` + + +* + ```sql + SELECT + (opt_cols0.my_int + ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ``` + + + +* + ```scala + Seq( + OptCols(None, None), + OptCols(Some(11), Some(2)), + OptCols(Some(13), None), + OptCols(None, Some(4)) + ) + ``` + + + ## PostgresDialect Operations specific to working with Postgres Databases ### PostgresDialect.distinctOn diff --git a/docs/tutorial.md b/docs/tutorial.md index ba10f6a7..35ed59db 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -1,7 +1,7 @@ [//]: # (GENERATED SOURCES, DO NOT EDIT DIRECTLY) -This tutorials is a tour of how to use ScalaSql, from the most basic concepts +This tutorial is a tour of how to use ScalaSql, from the most basic concepts to writing some realistic queries. If you are browsing this on Github, you can open the `Outline` pane on the right to browse the section headers to see what we will cover and find anything specific of interest to you. @@ -71,8 +71,14 @@ supported databases, to see what kind of set up is necessary for each one ### Modeling Your Schema Next, you need to define your data model classes. In ScalaSql, your data model -is defined using `case class`es with each field wrapped in the wrapper type -parameter `T[_]`. This allows us to re-use the same case class to represent +is defined using `case class`es with each field representing a column in an database table. 
+ +There are two flavors to consider: `Table` (available for Scala 2.13+), and `SimpleTable` (Scala 3.7+). + +**Using `Table`** + +Declare your case class with a type parameter `T[_]`, which is used to wrap the type of each +field. This allows us to re-use the same case class to represent both database values (when `T` is `scalasql.Expr`) as well as Scala values (when `T` is `scalasql.Sc`). @@ -120,6 +126,59 @@ case class CountryLanguage[T[_]]( object CountryLanguage extends Table[CountryLanguage]() ``` +**Using `SimpleTable`** +> Note: only available in the `com.lihaoyi::scalasql-namedtuples` library, which supports Scala 3.7.0+ + +Declare your case class as usual. Inside of queries, the class will be represented by a `Record` with the same fields, but wrapped in `scalasql.Expr`. + +Here, we define three classes `Country` `City` and `CountryLanguage`, modeling +the database tables we saw above. + +Also included is the necessary import statement to include the `SimpleTable` definition. + +```scala +import scalasql.simple.{*, given} + +case class Country( + code: String, + name: String, + continent: String, + region: String, + surfaceArea: Int, + indepYear: Option[Int], + population: Long, + lifeExpectancy: Option[Double], + gnp: Option[scala.math.BigDecimal], + gnpOld: Option[scala.math.BigDecimal], + localName: String, + governmentForm: String, + headOfState: Option[String], + capital: Option[Int], + code2: String +) + +object Country extends SimpleTable[Country] + +case class City( + id: Int, + name: String, + countryCode: String, + district: String, + population: Long +) + +object City extends SimpleTable[City] + +case class CountryLanguage( + countryCode: String, + language: String, + isOfficial: Boolean, + percentage: Double +) + +object CountryLanguage extends SimpleTable[CountryLanguage] +``` + ### Creating Your Database Client Lastly, we need to initialize our `scalasql.DbClient`. 
This requires passing in a `java.sql.Connection`, a `scalasql.Config` object, and the SQL dialect @@ -201,11 +260,16 @@ db.run(query).take(3) ==> Seq( ) ``` -Notice that `db.run` returns instances of type `City[Sc]`. `Sc` is `scalasql.Sc`, +Notice that `db.run` returns instances of type `City[Sc]` (or `City` if using `SimpleTable`). + +`Sc` is `scalasql.Sc`, short for the "Scala" type, representing a `City` object containing normal Scala values. The `[Sc]` type parameter must be provided explicitly whenever creating, type-annotating, or otherwise working with these `City` values. +> In this tutorial, unless otherwise specified, we will assume usage of the `Table` encoding. +> If you are using `SimpleTable`, the same code will work, but drop `[Sc]` type arguments. + In this example, we do `.take(3)` after running the query to show only the first 3 table entries for brevity, but by that point the `City.select` query had already fetched the entire database table into memory. This can be a problem with non-trivial @@ -235,8 +299,12 @@ db.run(query) ==> City[Sc](3208, "Singapore", "SGP", district = "", population = ``` Note that we use `===` rather than `==` for the equality comparison. The function literal passed to `.filter` is given a `City[Expr]` as its parameter, -representing a `City` that is part of the database query, in contrast to the -`City[Sc]`s that `db.run` returns , and so `_.name` is of type `Expr[String]` +(or `Record[City, Expr]` with the `SimpleTable` encoding) representing a `City` +that is part of the database query, in contrast to the +`City[Sc]`s that `db.run` returns. + +Within a query therefore `_.name` is a field selection on the function parameter, +resulting in `Expr[String]`, rather than just `String` or `Sc[String]`. 
You can use your IDE's auto-complete to see what operations are available on `Expr[String]`: typically they will represent SQL string functions rather than Scala string functions and @@ -309,7 +377,8 @@ db.run(query).take(2) ==> Seq( ) ``` -Again, all the operations within the query work on `Expr`s: `c` is a `City[Expr]`, +Again, all the operations within the query work on `Expr`s: +`c` is a `City[Expr]` (or `Record[City, Expr]` for `SimpleTable`), `c.population` is an `Expr[Int]`, `c.countryCode` is an `Expr[String]`, and `===` and `>` and `&&` on `Expr`s all return `Expr[Boolean]`s that represent a SQL expression that can be sent to the Database as part of your query. @@ -427,8 +496,60 @@ db.run(query) ==> "SINGAPORE", 4 // population in millions ) + +``` + +**Mapping with named tuples** +> Note: only available in the `com.lihaoyi::scalasql-namedtuples` library, which supports Scala 3.7.0+ + +You can also use named tuples to map the results of a query. +```scala +// `NamedTupleQueryable` is also included by `import scalasql.simple.given` +import scalasql.namedtuples.NamedTupleQueryable.given + +val query = Country.select.map(c => (name = c.name, continent = c.continent)) + +db.run(query).take(5) ==> Seq( + (name = "Afghanistan", continent = "Asia"), + (name = "Netherlands", continent = "Europe"), + (name = "Netherlands Antilles", continent = "North America"), + (name = "Albania", continent = "Europe"), + (name = "Algeria", continent = "Africa") +) +``` + +**Updating `Record` fields** +> Note: only relevant when using the `SimpleTable` encoding. + +When using `SimpleTable`, within the `.map` query `c` is of type +`Record[Country, Expr]`. Records are converted back to their associated case class +(e.g. `Country`) with `db.run`. + +If you want to apply updates to any of the fields before returning, the `Record` class +provides an `updates` method. This lets you provide an arbitrary sequence of updates to +apply in-order to the record. 
You can either provide a value with `:=`, +or provide a function that transforms the old value. For example: + +```scala +val query = Country.select.map(c => + c.updates( + _.population := 0L, + _.name(old => Expr("🌐 ") + old) + ) +) + +db.run(query).take(5).match { + case Seq( + Country(name = "🌐 Afghanistan", population = 0L), + Country(name = "🌐 Netherlands", population = 0L), + Country(name = "🌐 Netherlands Antilles", population = 0L), + Country(name = "🌐 Albania", population = 0L), + Country(name = "🌐 Algeria", population = 0L) + ) => +} ==> () ``` + ### Aggregates You can perform simple aggregates like `.sum` as below, where we @@ -1213,7 +1334,9 @@ try { throw new Exception() } -} catch { case e: Exception => /*do nothing*/ } +} catch { + case e: Exception => /*do nothing*/ +} dbClient.transaction { implicit db => db.run(City.select.filter(_.countryCode === "SGP").single) ==> @@ -1255,7 +1378,9 @@ dbClient.transaction { implicit db => db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() throw new Exception() } - } catch { case e: Exception => /*do nothing*/ } + } catch { + case e: Exception => /*do nothing*/ + } db.run(City.select.filter(_.countryCode === "SGP").single) ==> City[Sc](3208, "Singapore", "SGP", district = "", population = 4017733) @@ -1357,6 +1482,23 @@ db.run( db.run(City.select.filter(_.id === 313373).single) ==> City[Sc](CityId(313373), "test", "XYZ", "district", 1000000) + + +``` +You can also use `TypeMapper#bimap` for the common case where you want the +new `TypeMapper` to behave the same as an existing `TypeMapper`, just with +conversion functions to convert back and forth between the old type and new type: + +```scala +case class CityId2(value: Int) + +object CityId2 { + implicit def tm: TypeMapper[CityId2] = TypeMapper[Int].bimap[CityId2]( + city => city.value, + int => CityId2(int) + ) +} + ``` ```scala @@ -1383,8 +1525,7 @@ db.run( db.run(City2.select.filter(_.id === 31337).single) ==> City2[Sc](CityId2(31337), "test", 
"XYZ", "district", 1000000) - -st("customTableColumnNames") { +``` ## Customizing Table and Column Names diff --git a/readme.md b/readme.md index b56b8556..5ec746a6 100644 --- a/readme.md +++ b/readme.md @@ -56,12 +56,12 @@ dbClient.transaction{ db => } ``` -ScalaSql supports database connections to PostgreSQL, MySQL, Sqlite, and H2 databases. +ScalaSql supports database connections to PostgreSQL, MySQL, Sqlite, and H2 databases. Support for additional databases can be easily added. ScalaSql is a relatively new library, so please try it out, but be aware you may hit bugs or missing features! Please open [Discussions](https://github.com/com-lihaoyi/scalasql/discussions) -for any questions, file [Issues](https://github.com/com-lihaoyi/scalasql/issues) for any +for any questions, file [Issues](https://github.com/com-lihaoyi/scalasql/issues) for any bugs you hit, or send [Pull Requests](https://github.com/com-lihaoyi/scalasql/pulls) if you are able to investigate and fix them! @@ -76,10 +76,56 @@ ivy"com.lihaoyi::scalasql:0.1.19" ScalaSql supports Scala 2.13.x and >=3.6.2 +### SimpleTable variant based on named tuples + +For Scala versions >=3.7.0 supporting named tuples, an alternative way to define tables is supported. 
+ +Add the following to your `build.sc` file as follows: + + +```scala +ivy"com.lihaoyi::scalasql-namedtuples:0.1.19" +``` + +And taking the example above, the only thing that needs to change is the following: +```diff +-import scalasql._, SqliteDialect._ ++import scalasql.simple._, SqliteDialect._ + + // Define your table model classes +-case class City[T[_]]( +- id: T[Int], +- name: T[String], +- countryCode: T[String], +- district: T[String], +- population: T[Long] +-) +-object City extends Table[City] ++case class City( ++ id: Int, ++ name: String, ++ countryCode: String, ++ district: String, ++ population: Long ++) ++object City extends SimpleTable[City] +``` + +And you now have the option to return named tuples from queries: +```diff + val fewLargestCities = db.run( + City.select + .sortBy(_.population).desc + .drop(5).take(3) +- .map(c => (c.name, c.population)) ++ .map(c => (name = c.name, pop = c.population)) + ) +``` + ## Documentation * ScalaSql Quickstart Examples: self-contained files showing how to set up ScalaSql to - connect your Scala code to a variety of supported databases and perform simple DDL and + connect your Scala code to a variety of supported databases and perform simple DDL and `SELECT`/`INSERT`/`UPDATE`/`DELETE` operations: * [Postgres](scalasql/test/src/example/PostgresExample.scala) * [MySql](scalasql/test/src/example/MySqlExample.scala) @@ -104,7 +150,7 @@ ScalaSql supports Scala 2.13.x and >=3.6.2 to execute queries * [Transaction](docs/reference.md#transaction), covering usage of transactions and savepoints - * [Select](docs/reference.md#select), [Insert](docs/reference.md#insert), + * [Select](docs/reference.md#select), [Insert](docs/reference.md#insert), [Update](docs/reference.md#update), [Delete](docs/reference.md#delete): covering operations on the primary queries you are likely to use * [Join](docs/reference.md#join), covering different kinds of joins @@ -113,14 +159,14 @@ ScalaSql supports Scala 2.13.x and >=3.6.2 * 
[Expression Operations](docs/reference.md#exprops), covering the different types of `Expr[T]` values and the different operations you can do on each one * [Option Operations](docs/reference.md#optional), operations on `Expr[Option[T]` - * [Window Functions](docs/reference.md#windowfunctions), + * [Window Functions](docs/reference.md#windowfunctions), [With-Clauses/Common-Table-Expressions](docs/reference.md#withcte) * [Postgres](docs/reference.md#postgresdialect), [MySql](docs/reference.md#mysqldialect), [Sqlite](docs/reference.md#sqlitedialect), [H2](docs/reference.md#h2dialect) Dialects: operations that are specific to each database that may not be generally applicable * [ScalaSql Design](docs/design.md): discusses the design of the ScalaSql library, why it - is built the way it is, what tradeoffs it makes, and how it compares to other + is built the way it is, what tradeoffs it makes, and how it compares to other common Scala database query libraries. Ideal for contributors who want to understand the structure of the ScalaSql codebase, or for advanced users who may need to understand enough to extend ScalaSql with custom functionality. diff --git a/scalasql/namedtuples/src/NamedTupleQueryable.scala b/scalasql/namedtuples/src/NamedTupleQueryable.scala new file mode 100644 index 00000000..5789b3b1 --- /dev/null +++ b/scalasql/namedtuples/src/NamedTupleQueryable.scala @@ -0,0 +1,84 @@ +package scalasql.namedtuples + +import scala.NamedTuple.NamedTuple +import scalasql.core.{Queryable, Expr} + +object NamedTupleQueryable { + + /** A sequence of n `Queryable.Row[Q, R]` instances, where `X` corresponds to all the `Q` and `Y` to all the `R` */ + opaque type Rows[X <: Tuple, +Y <: Tuple] = List[Queryable.Row[?, ?]] + + object Rows { + // it seems "traditional" recursive implicit search is the only way to infer the types of `R` when only `Qs` is known. 
+ // see https://gist.github.com/bishabosha/e630f76384093153b17f1498a9459518 for a variant that + // uses compiletime.summonAll, but it does a double implicit search, so wasnt chosen for the moment. + + given concatRows: [Q, R, Qs <: Tuple, Rs <: Tuple] + => (x: Queryable.Row[Q, R]) + => (xs: Rows[Qs, Rs]) + => Rows[Q *: Qs, R *: Rs] = + x :: xs + + given emptyRows: Rows[EmptyTuple, EmptyTuple] = Nil + } + + /** + * A `Queryable.Row` instance for an arbitrary named tuple type, can be derived even + * when one of `X` or `Y` is unknown. + */ + given NamedTupleRow: [N <: Tuple, X <: Tuple, Y <: Tuple] + => (rs: Rows[X, Y]) + => Queryable.Row[NamedTuple[N, X], NamedTuple[N, Y]] = + NamedTupleRowImpl[N, X, Y](rs) + + private final class NamedTupleRowImpl[ + N <: Tuple, + X <: Tuple, + Y <: Tuple + ]( + rs: List[Queryable.Row[?, ?]] + ) extends Queryable.Row[NamedTuple[N, X], NamedTuple[N, Y]]: + def walkExprs(q: NamedTuple[N, X]): Seq[Expr[?]] = { + val walkExprs0 = { + val ps = q.toTuple.productIterator + rs.iterator + .zip(ps) + .map({ (row, p) => + type Q + type R + val q = p.asInstanceOf[Q] + row.asInstanceOf[Queryable.Row[Q, R]].walkExprs(q) + }) + } + + walkExprs0.zipWithIndex + .map { case (v, i) => (i.toString, v) } + .flatMap { case (prefix, vs0) => vs0 } + .toIndexedSeq + } + def walkLabels(): Seq[List[String]] = { + val walkLabels0 = rs.iterator.map(_.walkLabels()) + walkLabels0.zipWithIndex + .map { case (v, i) => (i.toString, v) } + .flatMap { case (prefix, vs0) => vs0.map { k => prefix +: k } } + .toIndexedSeq + } + def construct(args: scalasql.core.Queryable.ResultSetIterator): NamedTuple.NamedTuple[N, Y] = + val data = IArray.from(rs.iterator.map(_.construct(args))) + Tuple.fromIArray(data).asInstanceOf[NamedTuple.NamedTuple[N, Y]] + + def deconstruct(r: NamedTuple.NamedTuple[N, Y]): NamedTuple.NamedTuple[N, X] = + val data = IArray.from { + val ps = r.toTuple.productIterator + rs.iterator + .zip(ps) + .map({ (row, p) => + type Q + type R + val r = 
p.asInstanceOf[R] + row.asInstanceOf[Queryable.Row[Q, R]].deconstruct(r) + }) + } + Tuple.fromIArray(data).asInstanceOf[NamedTuple.NamedTuple[N, X]] + +} diff --git a/scalasql/namedtuples/src/SimpleTable.scala b/scalasql/namedtuples/src/SimpleTable.scala new file mode 100644 index 00000000..a1efa508 --- /dev/null +++ b/scalasql/namedtuples/src/SimpleTable.scala @@ -0,0 +1,205 @@ +package scalasql.namedtuples + +import scala.NamedTuple.{AnyNamedTuple, NamedTuple} + +import scalasql.query.Table +import scalasql.core.DialectTypeMappers +import scalasql.core.Queryable +import scalasql.query.Column +import scalasql.core.Sc +import scalasql.core.Expr + +import scala.compiletime.asMatchable + +/** + * In-code representation of a SQL table, associated with a given `case class` `C`. + * + * Note that if a field of `C` is a case class `X` that also provides SimpleTable metadata, + * then `X` must extend [[package.SimpleTable.Nested SimpleTable.Nested]]. + * + * `SimpleTable` extends `Table`, sharing its underlying metadata. + * Compared to `Table`, it allows to `C` to not require a higher-kinded type parameter. + * Consequently a [[package.SimpleTable.Record Record]] is used in queries + * rather than `C` itself. + */ +class SimpleTable[C]( + using name: sourcecode.Name, + metadata0: Table.Metadata[[T[_]] =>> SimpleTable.MapOver[C, T]] +) extends Table[[T[_]] =>> SimpleTable.MapOver[C, T]](using name, metadata0) { + given simpleTableGivenMetadata: SimpleTable.GivenMetadata[C] = + SimpleTable.GivenMetadata(metadata0) +} + +object SimpleTable { + + /** + * Marker class that signals that a data type is convertable to an SQL table row. + * @note this must be a class to convince the match type reducer that it provably can't be mixed + * into various column types such as `java.util.Date`, `geny.Bytes`, or `scala.Option`. + */ + abstract class Nested + + /** + * A type that can map `T` over the fields of `C`. 
If `T` is the identity then `C` itself, + * else a [[SimpleTable.Record Record]]. + * + * @tparam C the case class type + * @tparam T the type constructor to map over the fields of `C`. + */ + type MapOver[C, T[_]] = T[Internal.Tombstone.type] match { + case Internal.Tombstone.type => C // T is `Sc` + case _ => Record[C, T] + } + + /** Super type of all [[SimpleTable.Record Record]]. */ + sealed trait AnyRecord extends Product with Serializable + + /** + * Record is a fixed size product type, its fields correspond to the fields of `C` + * mapped over by `T` (see [[Record#Fields Fields]] for more information). + * + * @see [[Record#Fields Fields]] for how the fields are mapped. + */ + final class Record[C, T[_]](private val data: IArray[AnyRef]) extends AnyRecord with Selectable: + + /** + * For each field `x: X` of class `C` there exists a field `x` in this record of type + * `Record[X, T]` if `X` is a case class that represents a table, or `T[X]` otherwise. + */ + type Fields = NamedTuple.Map[ + NamedTuple.From[C], + [X] =>> X match { + case Nested => Record[X, T] + case _ => T[X] + } + ] + def apply(i: Int): AnyRef = data(i) + def canEqual(that: Any): Boolean = that.isInstanceOf[Record[?, ?]] + override def productPrefix: String = "Record" + def productArity: Int = data.length + def productElement(i: Int): AnyRef = data(i) + override def equals(that: Any): Boolean = that.asMatchable match + case _: this.type => true + case r: Record[?, ?] => + r.canEqual(this) && IArray.equals(data, r.data) + case _ => false + + /** + * Apply a sequence of patches to the record. e.g. + * ``` + * case class Foo(arg1: Int, arg2: String) + * val r: Record[Foo, Expr] + * val r0 = r.updates(_.arg1(_ * 2), _.arg2 := "bar") + * ``` + * + * @param fs a sequence of functions that create a patch from a [[RecordUpdater]]. + * Each field of the record updater is typed as a [[SimpleTable.Field Field]], + * corresponding to the fields of `C` mapped over by `T`. + * in this record. 
+ * @return a new record (of the same type) with the patches applied. + */ + def updates(fs: (RecordUpdater[C, T] => Patch)*): Record[C, T] = + val u = recordUpdater[C, T] + val arr = IArray.genericWrapArray(data).toArray + fs.foreach: f => + val patch = f(u) + val idx = patch.idx + arr(idx) = patch.f(arr(idx)) + Record(IArray.unsafeFromArray(arr)) + + inline def selectDynamic(name: String): AnyRef = + apply(compiletime.constValue[Record.IndexOf[name.type, Record.Names[C], 0]]) + + private object RecordUpdaterImpl extends RecordUpdater[Any, [T] =>> Any] + def recordUpdater[C, T[_]]: RecordUpdater[C, T] = + RecordUpdaterImpl.asInstanceOf[RecordUpdater[C, T]] + + /** A single update to a field of a `Record[C, T]`, used by [[Record#updates]] */ + final class Patch private[SimpleTable] ( + private[SimpleTable] val idx: Int, + private[SimpleTable] val f: AnyRef => AnyRef + ) + + /** A `Field[T]` is used to create a patch for a field in a [[SimpleTable.Record Record]]. */ + final class Field[T](private val factory: (T => T) => Patch) extends AnyVal + object Field { + extension [T](field: Field[T]) { + + /** Create a patch that replaces the old value with `x` */ + def :=(x: T): Patch = field.factory(Function.const(x)) + + /** Create a patch that can transform the old value with `f` */ + def apply(f: T => T): Patch = field.factory(f) + } + } + + /** + * A Record updaters fields correspond to `Record[C, T]`, where each accepts a + * function to update a field. e.g. + * ``` + * case class Foo(arg1: Int, arg2: String) + * val u: RecordUpdater[Foo, Expr] + * val p0: Patch = u.arg1(_ * 2) + * val p1: Patch = u.arg2 := "bar" + * ``` + * This class is mainly used to provide patches to + * the [[Record#updates updates]] method of `Record`. + * (See [[RecordUpdater#Fields Fields]] for more information on how the fields are typed.) + * + * @see [[Record#updates updates]] for how to apply the patches. + * @see [[RecordUpdater#Fields Fields]] for how the fields are mapped. 
+ */ + sealed trait RecordUpdater[C, T[_]] extends Selectable: + + /** + * For each field `x: X` of class `C` + * there exists a field `x: Field[X']` in this record updater. `X'` is instantiated to + * `Record[X, T]` if `X` is a case class that represents a table, or `T[X]` otherwise. + */ + type Fields = NamedTuple.Map[ + NamedTuple.From[C], + [X] =>> X match { + case Nested => Field[Record[X, T]] + case _ => Field[T[X]] + } + ] + def apply(i: Int): Field[AnyRef] = + new Field(f => Patch(i, f)) + inline def selectDynamic(name: String): Field[AnyRef] = + apply(compiletime.constValue[Record.IndexOf[name.type, Record.Names[C], 0]]) + + object Record: + import scala.compiletime.ops.int.* + + /** Tuple of literal strings corresponding to the fields of case class `C` */ + type Names[C] = NamedTuple.Names[NamedTuple.From[C]] + + /** The literal `Int` type corresponding to the index of `N` in `T`, or `-1` if not found. */ + type IndexOf[N, T <: Tuple, Acc <: Int] <: Int = T match { + case EmptyTuple => -1 + case N *: _ => Acc + case _ *: t => IndexOf[N, t, S[Acc]] + } + + /** Factory that creates an arbitrary Record */ + def fromIArray(data: IArray[AnyRef]): AnyRecord = + Record(data) + + /** Internal API of SimpleTable */ + object Internal extends SimpleTableMacros { + + /** An object with singleton type that is provably disjoint from most other types. */ + case object Tombstone + } + + /** A type that gives access to the Table metadata of `C`. 
*/ + opaque type GivenMetadata[C] = GivenMetadata.Inner[C] + object GivenMetadata { + type Inner[C] = Table.Metadata[[T[_]] =>> SimpleTable.MapOver[C, T]] + def apply[C](metadata: Inner[C]): GivenMetadata[C] = metadata + extension [C](m: GivenMetadata[C]) { + def metadata: Inner[C] = m + } + } + +} diff --git a/scalasql/namedtuples/src/SimpleTableMacros.scala b/scalasql/namedtuples/src/SimpleTableMacros.scala new file mode 100644 index 00000000..6a97f83a --- /dev/null +++ b/scalasql/namedtuples/src/SimpleTableMacros.scala @@ -0,0 +1,261 @@ +package scalasql.namedtuples + +import scalasql.query.Table +import scalasql.core.Queryable +import scalasql.core.DialectTypeMappers +import scalasql.core.Expr +import scalasql.core.Sc +import scalasql.query.TableRef +import scalasql.query.Column +import scala.deriving.Mirror +import java.util.concurrent.atomic.AtomicReference +import scala.reflect.ClassTag +import scala.NamedTuple.AnyNamedTuple +import java.util.function.UnaryOperator +import scala.annotation.nowarn +import scalasql.namedtuples.SimpleTableMacros.BaseLabels +import scalasql.core.TypeMapper +import scala.annotation.unused +import scala.annotation.implicitNotFound + +object SimpleTableMacros { + def asIArray[T: ClassTag](t: Tuple): IArray[T] = { + IArray.from(t.productIterator.asInstanceOf[Iterator[T]]) + } + def asIArrayFlatUnwrapWithIndex[T]( + t: Tuple + )[U: ClassTag](f: (T, Int) => IterableOnce[U]): IArray[U] = { + IArray.from(t.productIterator.asInstanceOf[Iterator[T]].zipWithIndex.flatMap(f.tupled)) + } + def unwrapLabels(t: Tuple, labels: IArray[String]): IndexedSeq[String] = { + asIArrayFlatUnwrapWithIndex[BaseLabels[Any, Any]](t)((l, i) => l(labels(i))).toIndexedSeq + } + def unwrapColumns(t: Tuple): IArray[(DialectTypeMappers, TableRef) => AnyRef] = { + asIArray[ContraRefMapper[BaseColumn[Any, Any]]](t) + } + def unwrapRows(t: Tuple): IArray[DialectTypeMappers => Queryable.Row[?, ?]] = { + asIArray[ContraMapper[SimpleTableMacros.BaseRowExpr[Any]]](t) + 
} + def make[C](m: Mirror.ProductOf[C], data: IArray[AnyRef]): C = { + class ArrayProduct extends Product { + override def canEqual(that: Any): Boolean = false + override def productElement(n: Int): Any = data(n) + override def productIterator: Iterator[Any] = data.iterator + override def productPrefix: String = "ArrayProduct" + override def productArity: Int = data.length + } + m.fromProduct(ArrayProduct()) + } + + opaque type ContraMapper[T] = DialectTypeMappers => T + + object ContraMapper { + inline given [T]: ContraMapper[T] = + case mappers => + import mappers.given + compiletime.summonInline[T] + } + + opaque type ContraRefMapper[T] = (DialectTypeMappers, TableRef) => T + + object ContraRefMapper { + @nowarn("msg=inline given alias") + inline given [T]: ContraRefMapper[T] = + (mappers, tableRef) => + tableRef match + case given TableRef => + import mappers.given + compiletime.summonInline[T] + } + + opaque type BaseRowExpr[T] = Queryable.Row[?, ?] + object BaseRowExpr { + given summonDelegate[T]( + using @unused m: SimpleTable.GivenMetadata[T], + @unused e: T <:< SimpleTable.Nested + )( + using delegate: Queryable.Row[ + SimpleTable.MapOver[T, Expr], + SimpleTable.MapOver[T, Sc] + ] + ): BaseRowExpr[T] = delegate + given summonBasic[T](using @unused ev: scala.util.NotGiven[SimpleTable.GivenMetadata[T]])( + using delegate: Queryable.Row[Expr[T], Sc[T]] + ): BaseRowExpr[T] = delegate + } + + opaque type BaseColumn[L, T] = AnyRef + trait BaseColumnLowPrio { + given notFound: [L <: String, T] + => (l: ValueOf[L], ref: TableRef, mapper: TypeMapper[T]) + => BaseColumn[L, T] = + val col = new Column[T]( + ref, + Table.columnNameOverride(ref.value)(l.value) + ) + col + } + object BaseColumn extends BaseColumnLowPrio { + given foundMeta: [L <: String, T] + => (mappers: DialectTypeMappers, ref: TableRef, m: SimpleTable.GivenMetadata[T]) + => BaseColumn[L, T] = + m.metadata.vExpr(ref, mappers).asInstanceOf[AnyRef] + } + + opaque type BaseLabels[L, C] = String => 
Seq[String] + trait BaseLabelsLowPrio { + given notFound: [L <: String, C] => BaseLabels[L, C] = + label => Seq(label) + } + object BaseLabels extends BaseLabelsLowPrio { + given foundMeta: [L, C] => (m: SimpleTable.GivenMetadata[C]) => BaseLabels[L, C] = + _ => m.metadata.walkLabels0() + } + + def setNonNull[T](r: AtomicReference[T | Null])(f: => T): T = { + val local = r.get() + val res = + if local != null then local + else + r.updateAndGet(t => + if t == null then f + else t + ) + res.nn + } + + def walkAllExprs( + queryable: Table.Metadata.QueryableProxy + )(e: SimpleTable.Record[?, ?]): IndexedSeq[Expr[?]] = { + var i = 0 + val fields = e.productIterator + val buf = IndexedSeq.newBuilder[Seq[Expr[?]]] + while fields.hasNext do + type T + type Field + val field = fields.next().asInstanceOf[Field] + val row = queryable[Field, T](i) + buf += row.walkExprs(field) + i += 1 + buf.result().flatten + } + + def construct[C]( + queryable: Table.Metadata.QueryableProxy + )(size: Int, args: Queryable.ResultSetIterator, factory: IArray[AnyRef] => C): C = { + var i = 0 + val buf = IArray.newBuilder[AnyRef] + while i < size do + type T + type Field + val row = queryable[Field, T](i) + buf += row.construct(args).asInstanceOf[AnyRef] + i += 1 + factory(buf.result()) + } + + def deconstruct[R <: SimpleTable.Record[?, ?]]( + queryable: Table.Metadata.QueryableProxy + )(c: Product): R = { + var i = 0 + val buf = IArray.newBuilder[AnyRef] + val fields = c.productIterator + while fields.hasNext do + type T + type Field + val field = fields.next().asInstanceOf[T] + val row = queryable[Field, T](i) + buf += row.deconstruct(field).asInstanceOf[AnyRef] + i += 1 + SimpleTable.Record.fromIArray(buf.result()).asInstanceOf[R] + } + + def labels(t: Tuple): IArray[String] = asIArray(t) + + inline def getMirror[C]: (Tuple, Mirror.ProductOf[C]) = { + compiletime.summonFrom { case m: Mirror.ProductOf[C] => + (compiletime.constValueTuple[m.MirroredElemLabels], m) + } + } + +} + +trait 
SimpleTableMacros { + class SimpleTableState[C <: Product]( + mirrorPair0: => (Tuple, Mirror.ProductOf[C]), + rowsRef0: => Tuple, + colsRef0: => Tuple, + labelsRef0: => Tuple + ): + private lazy val mirrorPair = + val (names0, mirror0) = mirrorPair0 + ( + SimpleTableMacros.labels(names0), + mirror0 + ) + def labels: IArray[String] = mirrorPair(0) + def mirror: Mirror.ProductOf[C] = mirrorPair(1) + lazy val rowsRef: IArray[DialectTypeMappers => Queryable.Row[?, ?]] = + SimpleTableMacros.unwrapRows(rowsRef0) + lazy val colsRef: IArray[(DialectTypeMappers, TableRef) => AnyRef] = + SimpleTableMacros.unwrapColumns(colsRef0) + lazy val labelsRef: IndexedSeq[String] = + SimpleTableMacros.unwrapLabels(labelsRef0, labels) + + inline given initTableMetadata[C <: Product] + : Table.Metadata[[T[_]] =>> SimpleTable.MapOver[C, T]] = + type Impl[T[_]] = SimpleTable.MapOver[C, T] + type Labels = NamedTuple.Names[NamedTuple.From[C]] + type Values = NamedTuple.DropNames[NamedTuple.From[C]] + type Pairs[F[_, _]] = Tuple.Map[ + Tuple.Zip[Labels, Values], + [X] =>> X match { + case (a, b) => F[a, b] + } + ] + type FlatLabels = Pairs[SimpleTableMacros.BaseLabels] + type Columns = Pairs[[L, + T] =>> SimpleTableMacros.ContraRefMapper[SimpleTableMacros.BaseColumn[L, T]]] + type Rows = Tuple.Map[ + Values, + [T] =>> SimpleTableMacros.ContraMapper[SimpleTableMacros.BaseRowExpr[T]] + ] + + val state = new SimpleTableState[C]( + mirrorPair0 = SimpleTableMacros.getMirror[C], + rowsRef0 = compiletime.summonAll[Rows], + colsRef0 = compiletime.summonAll[Columns], + labelsRef0 = compiletime.summonAll[FlatLabels] + ) + + def queryables(mappers: DialectTypeMappers, idx: Int): Queryable.Row[?, ?] 
= + state.rowsRef(idx)(mappers) + + def walkLabels0(): Seq[String] = state.labelsRef + + def queryable( + walkLabels0: () => Seq[String], + @nowarn("msg=unused") mappers: DialectTypeMappers, + queryable: Table.Metadata.QueryableProxy + ): Queryable[Impl[Expr], Impl[Sc]] = Table.Internal.TableQueryable( + walkLabels0, + walkExprs0 = SimpleTableMacros.walkAllExprs(queryable), + construct0 = args => + SimpleTableMacros.construct(queryable)( + size = state.labels.size, + args = args, + factory = SimpleTableMacros.make(state.mirror, _) + ), + deconstruct0 = values => SimpleTableMacros.deconstruct[Impl[Expr]](queryable)(values) + ) + + def vExpr0( + tableRef: TableRef, + mappers: DialectTypeMappers, + @nowarn("msg=unused") queryable: Table.Metadata.QueryableProxy + ): Impl[Column] = + val columns = state.colsRef.map(_(mappers, tableRef)) + SimpleTable.Record.fromIArray(columns).asInstanceOf[Impl[Column]] + + Table.Metadata[Impl](queryables, walkLabels0, queryable, vExpr0) +} diff --git a/scalasql/namedtuples/src/simple.scala b/scalasql/namedtuples/src/simple.scala new file mode 100644 index 00000000..acf0460a --- /dev/null +++ b/scalasql/namedtuples/src/simple.scala @@ -0,0 +1,6 @@ +package scalasql + +package object simple: + export scalasql.namedtuples.SimpleTable + export scalasql.namedtuples.NamedTupleQueryable.given + export scalasql.`package`.{*, given} diff --git a/scalasql/namedtuples/test/src/SimpleTableConcreteTestSuites.scala b/scalasql/namedtuples/test/src/SimpleTableConcreteTestSuites.scala new file mode 100644 index 00000000..62cf06c0 --- /dev/null +++ b/scalasql/namedtuples/test/src/SimpleTableConcreteTestSuites.scala @@ -0,0 +1,303 @@ +package scalasql.namedtuples +import scalasql.api.{TransactionTests, DbApiTests} +import scalasql.operations.{ + ExprBooleanOpsTests, + ExprNumericOpsTests, + ExprAggNumericOpsTests, + ExprAggOpsTests, + ExprOpsTests, + DbApiOpsTests, + ExprStringOpsTests, + ExprBlobOpsTests, + ExprMathOpsTests +} +import scalasql.query.{ + 
InsertTests, + DeleteTests, + SelectTests, + JoinTests, + FlatJoinTests, + CompoundSelectTests, + SubQueryTests, + UpdateTests, + UpdateJoinTests, + UpdateSubQueryTests, + ReturningTests, + OnConflictTests, + ValuesTests, + LateralJoinTests, + WindowFunctionTests, + GetGeneratedKeysTests, + WithCteTests, + SchemaTests, + EscapedTableNameTests, + EscapedTableNameWithReturningTests +} +import scalasql.dialects.{ + MySqlDialectTests, + PostgresDialectTests, + SqliteDialectTests, + H2DialectTests +} + +package postgres { + + import scalasql.utils.PostgresSuite + + // object DbApiTests extends DbApiTests with PostgresSuite + // object TransactionTests extends TransactionTests with PostgresSuite + + // object SelectTests extends SelectTests with PostgresSuite + // object JoinTests extends JoinTests with PostgresSuite + // object FlatJoinTests extends FlatJoinTests with PostgresSuite + // object InsertTests extends InsertTests with PostgresSuite + // object UpdateTests extends UpdateTests with PostgresSuite + // object DeleteTests extends DeleteTests with PostgresSuite + // object CompoundSelectTests extends CompoundSelectTests with PostgresSuite + // object UpdateJoinTests extends UpdateJoinTests with PostgresSuite + // object UpdateSubQueryTests extends UpdateSubQueryTests with PostgresSuite + // object ReturningTests extends ReturningTests with PostgresSuite + // object OnConflictTests extends OnConflictTests with PostgresSuite + // object ValuesTests extends ValuesTests with PostgresSuite + // object LateralJoinTests extends LateralJoinTests with PostgresSuite + // object WindowFunctionTests extends WindowFunctionTests with PostgresSuite + // object GetGeneratedKeysTests extends GetGeneratedKeysTests with PostgresSuite + // object SchemaTests extends SchemaTests with PostgresSuite + // object EscapedTableNameTests extends EscapedTableNameTests with PostgresSuite + // object EscapedTableNameWithReturningTests + // extends EscapedTableNameWithReturningTests + // with 
PostgresSuite + + // object SubQueryTests extends SubQueryTests with PostgresSuite + // object WithCteTests extends WithCteTests with PostgresSuite + + // object DbApiOpsTests extends DbApiOpsTests with PostgresSuite + // object ExprOpsTests extends ExprOpsTests with PostgresSuite + // object ExprBooleanOpsTests extends ExprBooleanOpsTests with PostgresSuite + // object ExprNumericOpsTests extends ExprNumericOpsTests with PostgresSuite + // object ExprSeqNumericOpsTests extends ExprAggNumericOpsTests with PostgresSuite + // object ExprSeqOpsTests extends ExprAggOpsTests with PostgresSuite + // object ExprStringOpsTests extends ExprStringOpsTests with PostgresSuite + // object ExprBlobOpsTests extends ExprBlobOpsTests with PostgresSuite + // object ExprMathOpsTests extends ExprMathOpsTests with PostgresSuite + + object DataTypesTests + extends scalasql.namedtuples.datatypes.SimpleTableDataTypesTests + with PostgresSuite + + object OptionalTests + extends scalasql.namedtuples.datatypes.SimpleTableOptionalTests + with PostgresSuite + + // object PostgresDialectTests extends PostgresDialectTests + +} + +package hikari { + + import scalasql.utils.HikariSuite + + // object DbApiTests extends DbApiTests with HikariSuite + // object TransactionTests extends TransactionTests with HikariSuite + + // object SelectTests extends SelectTests with HikariSuite + // object JoinTests extends JoinTests with HikariSuite + // object FlatJoinTests extends FlatJoinTests with HikariSuite + // object InsertTests extends InsertTests with HikariSuite + // object UpdateTests extends UpdateTests with HikariSuite + // object DeleteTests extends DeleteTests with HikariSuite + // object CompoundSelectTests extends CompoundSelectTests with HikariSuite + // object UpdateJoinTests extends UpdateJoinTests with HikariSuite + // object UpdateSubQueryTests extends UpdateSubQueryTests with HikariSuite + // object ReturningTests extends ReturningTests with HikariSuite + // object OnConflictTests extends 
OnConflictTests with HikariSuite + // object ValuesTests extends ValuesTests with HikariSuite + // object LateralJoinTests extends LateralJoinTests with HikariSuite + // object WindowFunctionTests extends WindowFunctionTests with HikariSuite + // object GetGeneratedKeysTests extends GetGeneratedKeysTests with HikariSuite + // object SchemaTests extends SchemaTests with HikariSuite + // object EscapedTableNameTests extends EscapedTableNameTests with HikariSuite + // object EscapedTableNameWithReturningTests + // extends EscapedTableNameWithReturningTests + // with HikariSuite + + // object SubQueryTests extends SubQueryTests with HikariSuite + // object WithCteTests extends WithCteTests with HikariSuite + + // object DbApiOpsTests extends DbApiOpsTests with HikariSuite + // object ExprOpsTests extends ExprOpsTests with HikariSuite + // object ExprBooleanOpsTests extends ExprBooleanOpsTests with HikariSuite + // object ExprNumericOpsTests extends ExprNumericOpsTests with HikariSuite + // object ExprSeqNumericOpsTests extends ExprAggNumericOpsTests with HikariSuite + // object ExprSeqOpsTests extends ExprAggOpsTests with HikariSuite + // object ExprStringOpsTests extends ExprStringOpsTests with HikariSuite + // object ExprBlobOpsTests extends ExprBlobOpsTests with HikariSuite + // object ExprMathOpsTests extends ExprMathOpsTests with HikariSuite + + object DataTypesTests + extends scalasql.namedtuples.datatypes.SimpleTableDataTypesTests + with HikariSuite + + object OptionalTests + extends scalasql.namedtuples.datatypes.SimpleTableOptionalTests + with HikariSuite + + // object PostgresDialectTests extends PostgresDialectTests + +} + +package mysql { + + import scalasql.utils.MySqlSuite + + // object DbApiTests extends DbApiTests with MySqlSuite + // object TransactionTests extends TransactionTests with MySqlSuite + + // object SelectTests extends SelectTests with MySqlSuite + // object JoinTests extends JoinTests with MySqlSuite + // object FlatJoinTests extends 
FlatJoinTests with MySqlSuite + // object InsertTests extends InsertTests with MySqlSuite + // object UpdateTests extends UpdateTests with MySqlSuite + // object DeleteTests extends DeleteTests with MySqlSuite + // object CompoundSelectTests extends CompoundSelectTests with MySqlSuite + // object UpdateJoinTests extends UpdateJoinTests with MySqlSuite + // MySql does not support updates with subqueries referencing same table + // object UpdateSubQueryTests extends UpdateSubQueryTests with MySqlSuite + // MySql does not support INSERT/UPDATE RETURNING + // object ReturningTests extends ReturningTests with MySqlSuite + // MySql does not support onConflictIgnore and onConflictUpdate does not take columns + // object OnConflictTests extends OnConflictTests with MySqlSuite + // object ValuesTests extends ValuesTests with MySqlSuite + // object LateralJoinTests extends LateralJoinTests with MySqlSuite + // object WindowFunctionTests extends WindowFunctionTests with MySqlSuite + // object GetGeneratedKeysTests extends GetGeneratedKeysTests with MySqlSuite + + // object SubQueryTests extends SubQueryTests with MySqlSuite + // object WithCteTests extends WithCteTests with MySqlSuite + + // object DbApiOpsTests extends DbApiOpsTests with MySqlSuite + // object ExprOpsTests extends ExprOpsTests with MySqlSuite + // object ExprBooleanOpsTests extends ExprBooleanOpsTests with MySqlSuite + // object ExprNumericOpsTests extends ExprNumericOpsTests with MySqlSuite + // object ExprSeqNumericOpsTests extends ExprAggNumericOpsTests with MySqlSuite + // object ExprSeqOpsTests extends ExprAggOpsTests with MySqlSuite + // object ExprStringOpsTests extends ExprStringOpsTests with MySqlSuite + // object ExprBlobOpsTests extends ExprBlobOpsTests with MySqlSuite + // object ExprMathOpsTests extends ExprMathOpsTests with MySqlSuite + // In MySql, schemas are databases and this requires special treatment not yet implemented here + // object SchemaTests extends SchemaTests with MySqlSuite + // 
object EscapedTableNameTests extends EscapedTableNameTests with MySqlSuite + + object DataTypesTests + extends scalasql.namedtuples.datatypes.SimpleTableDataTypesTests + with MySqlSuite + object OptionalTests + extends scalasql.namedtuples.datatypes.SimpleTableOptionalTests + with MySqlSuite + + // object MySqlDialectTests extends MySqlDialectTests +} + +package sqlite { + + import scalasql.utils.SqliteSuite + + // object DbApiTests extends DbApiTests with SqliteSuite + // object TransactionTests extends TransactionTests with SqliteSuite + + // object SelectTests extends SelectTests with SqliteSuite + // object JoinTests extends JoinTests with SqliteSuite + // object FlatJoinTests extends FlatJoinTests with SqliteSuite + // object InsertTests extends InsertTests with SqliteSuite + // object UpdateTests extends UpdateTests with SqliteSuite + // object DeleteTests extends DeleteTests with SqliteSuite + // object CompoundSelectTests extends CompoundSelectTests with SqliteSuite + // object UpdateJoinTests extends UpdateJoinTests with SqliteSuite + // object UpdateSubQueryTests extends UpdateSubQueryTests with SqliteSuite + // object ReturningTests extends ReturningTests with SqliteSuite + // object OnConflictTests extends OnConflictTests with SqliteSuite + // object ValuesTests extends ValuesTests with SqliteSuite + // Sqlite does not support lateral joins + // object LateralJoinTests extends LateralJoinTests with SqliteSuite + // object WindowFunctionTests extends WindowFunctionTests with SqliteSuite + // Sqlite does not support getGeneratedKeys https://github.com/xerial/sqlite-jdbc/issues/980 + // object GetGeneratedKeysTests extends GetGeneratedKeysTests with SqliteSuite + + // object SubQueryTests extends SubQueryTests with SqliteSuite + // object WithCteTests extends WithCteTests with SqliteSuite + + // object DbApiOpsTests extends DbApiOpsTests with SqliteSuite + // object ExprOpsTests extends ExprOpsTests with SqliteSuite + // object ExprBooleanOpsTests extends 
ExprBooleanOpsTests with SqliteSuite + // object ExprNumericOpsTests extends ExprNumericOpsTests with SqliteSuite + // object ExprSeqNumericOpsTests extends ExprAggNumericOpsTests with SqliteSuite + // object ExprSeqOpsTests extends ExprAggOpsTests with SqliteSuite + // object ExprStringOpsTests extends ExprStringOpsTests with SqliteSuite + // object ExprBlobOpsTests extends ExprBlobOpsTests with SqliteSuite + // Sqlite doesn't support all these math operations + // object ExprMathOpsTests extends ExprMathOpsTests with SqliteSuite + // Sqlite doesn't support schemas + // object SchemaTests extends SchemaTests with SqliteSuite + // object EscapedTableNameTests extends EscapedTableNameTests with SqliteSuite + // object EscapedTableNameWithReturningTests + // extends EscapedTableNameWithReturningTests + // with SqliteSuite + + object DataTypesTests + extends scalasql.namedtuples.datatypes.SimpleTableDataTypesTests + with SqliteSuite + object OptionalTests + extends scalasql.namedtuples.datatypes.SimpleTableOptionalTests + with SqliteSuite + + // object SqliteDialectTests extends SqliteDialectTests +} + +package h2 { + + import scalasql.utils.H2Suite + + // object DbApiTests extends DbApiTests with H2Suite + // object TransactionTests extends TransactionTests with H2Suite + + // object SelectTests extends SelectTests with H2Suite + // object JoinTests extends JoinTests with H2Suite + // object FlatJoinTests extends FlatJoinTests with H2Suite + // object InsertTests extends InsertTests with H2Suite + // object UpdateTests extends UpdateTests with H2Suite + // object DeleteTests extends DeleteTests with H2Suite + // object CompoundSelectTests extends CompoundSelectTests with H2Suite + // object UpdateJoinTests extends UpdateTests with H2Suite + // object UpdateSubQueryTests extends UpdateSubQueryTests with H2Suite + // H2 does not support RETURNING keyword + // object ReturningTests extends ReturningTests with H2Suite + // H2 does not support ON CONFLICT IGNORE unless in 
postgres mode + // object OnConflictTests extends OnConflictTests with H2Suite + // object ValuesTests extends ValuesTests with H2Suite + // // H2 does not support lateral joins + // // object LateralJoinTests extends LateralJoinTests with H2Suite + // object WindowFunctionTests extends WindowFunctionTests with H2Suite + // object GetGeneratedKeysTests extends GetGeneratedKeysTests with H2Suite + // object SchemaTests extends SchemaTests with H2Suite + // object EscapedTableNameTests extends EscapedTableNameTests with H2Suite + + // object SubQueryTests extends SubQueryTests with H2Suite + // object WithCteTests extends WithCteTests with H2Suite + + // object DbApiOpsTests extends DbApiOpsTests with H2Suite + // object ExprOpsTests extends ExprOpsTests with H2Suite + // object ExprBooleanOpsTests extends ExprBooleanOpsTests with H2Suite + // object ExprNumericOpsTests extends ExprNumericOpsTests with H2Suite + // object ExprSeqNumericOpsTests extends ExprAggNumericOpsTests with H2Suite + // object ExprSeqOpsTests extends ExprAggOpsTests with H2Suite + // object ExprStringOpsTests extends ExprStringOpsTests with H2Suite + // object ExprBlobOpsTests extends ExprBlobOpsTests with H2Suite + // object ExprMathOpsTests extends ExprMathOpsTests with H2Suite + + object DataTypesTests + extends scalasql.namedtuples.datatypes.SimpleTableDataTypesTests + with H2Suite + object OptionalTests extends scalasql.namedtuples.datatypes.SimpleTableOptionalTests with H2Suite + + // object H2DialectTests extends H2DialectTests +} diff --git a/scalasql/namedtuples/test/src/SimpleTableExampleTests.scala b/scalasql/namedtuples/test/src/SimpleTableExampleTests.scala new file mode 100644 index 00000000..f255348d --- /dev/null +++ b/scalasql/namedtuples/test/src/SimpleTableExampleTests.scala @@ -0,0 +1,10 @@ +package scalasql.namedtuples + +import utest._ + +object SimpleTableExampleTests extends TestSuite: + def tests = Tests: + test("postgres") - 
example.SimpleTablePostgresExample.main(Array.empty) + test("mysql") - example.SimpleTableMySqlExample.main(Array.empty) + test("h2") - example.SimpleTableH2Example.main(Array.empty) + test("sqlite") - example.SimpleTableSqliteExample.main(Array.empty) diff --git a/scalasql/namedtuples/test/src/datatypes/LargeObjectTest.scala b/scalasql/namedtuples/test/src/datatypes/LargeObjectTest.scala new file mode 100644 index 00000000..4c4844b1 --- /dev/null +++ b/scalasql/namedtuples/test/src/datatypes/LargeObjectTest.scala @@ -0,0 +1,627 @@ +package scalasql.namedtuples.datatypes + +import scalasql.simple.{*, given} + +case class LargeObjectA( + a0: Int, + a1: Int, + a2: Int, + a3: Int, + a4: Int, + a5: Int, + a6: Int, + a7: Int, + a8: Int, + a9: Int, + a10: Int, + a11: Int, + a12: Int, + a13: Int, + a14: Int, + a15: Int, + a16: Int, + a17: Int, + a18: Int, + a19: Int, + a20: Int, + a21: Int, + a22: Int, + a23: Int, + a24: Int, + a25: Int, + a26: Int, + a27: Int, + a28: Int, + a29: Int, + a30: Int, + a31: Int, + a32: Int, + a33: Int, + a34: Int, + a35: Int, + a36: Int, + a37: Int, + a38: Int, + a39: Int, + a40: Int, + a41: Int, + a42: Int, + a43: Int, + a44: Int, + a45: Int, + a46: Int, + a47: Int, + a48: Int, + a49: Int, + a50: Int, + a51: Int, + a52: Int, + a53: Int, + a54: Int, + a55: Int, + a56: Int, + a57: Int, + a58: Int, + a59: Int, + a60: Int, + a61: Int, + a62: Int, + a63: Int, + a64: Int, + a65: Int, + a66: Int, + a67: Int, + a68: Int, + a69: Int, + a70: Int, + a71: Int, + a72: Int, + a73: Int, + a74: Int, + a75: Int, + a76: Int, + a77: Int, + a78: Int, + a79: Int, + a80: Int, + a81: Int, + a82: Int, + a83: Int, + a84: Int, + a85: Int, + a86: Int, + a87: Int, + a88: Int, + a89: Int, + a90: Int, + a91: Int, + a92: Int, + a93: Int, + a94: Int, + a95: Int, + a96: Int, + a97: Int, + a98: Int, + a99: Int, + a100: Int, + a101: Int, + a102: Int, + a103: Int, + a104: Int, + a105: Int, + a106: Int, + a107: Int, + a108: Int, + a109: Int, + a110: Int, + a111: Int, + a112: 
Int, + a113: Int, + a114: Int, + a115: Int, + a116: Int, + a117: Int, + a118: Int, + a119: Int, + a120: Int, + a121: Int, + a122: Int, + a123: Int, + a124: Int, + a125: Int, + a126: Int, + a127: Int, + a128: Int, + a129: Int, + a130: Int, + a131: Int, + a132: Int, + a133: Int, + a134: Int, + a135: Int, + a136: Int, + a137: Int, + a138: Int, + a139: Int, + a140: Int, + a141: Int, + a142: Int, + a143: Int, + a144: Int, + a145: Int, + a146: Int, + a147: Int, + a148: Int, + a149: Int, + a150: Int, + a151: Int, + a152: Int, + a153: Int, + a154: Int, + a155: Int, + a156: Int, + a157: Int, + a158: Int, + a159: Int, + a160: Int, + a161: Int, + a162: Int, + a163: Int, + a164: Int, + a165: Int, + a166: Int, + a167: Int, + a168: Int, + a169: Int, + a170: Int, + a171: Int, + a172: Int, + a173: Int, + a174: Int, + a175: Int, + a176: Int, + a177: Int, + a178: Int, + a179: Int, + a180: Int, + a181: Int, + a182: Int, + a183: Int, + a184: Int, + a185: Int, + a186: Int, + a187: Int, + a188: Int, + a189: Int, + a190: Int, + a191: Int, + a192: Int, + a193: Int, + a194: Int, + a195: Int, + a196: Int, + a197: Int, + a198: Int, + a199: Int, + b: LargeObjectB +) +object LargeObjectA extends SimpleTable[LargeObjectA] + +case class LargeObjectB( + b0: Int, + b1: Int, + b2: Int, + b3: Int, + b4: Int, + b5: Int, + b6: Int, + b7: Int, + b8: Int, + b9: Int, + b10: Int, + b11: Int, + b12: Int, + b13: Int, + b14: Int, + b15: Int, + b16: Int, + b17: Int, + b18: Int, + b19: Int, + b20: Int, + b21: Int, + b22: Int, + b23: Int, + b24: Int, + b25: Int, + b26: Int, + b27: Int, + b28: Int, + b29: Int, + b30: Int, + b31: Int, + b32: Int, + b33: Int, + b34: Int, + b35: Int, + b36: Int, + b37: Int, + b38: Int, + b39: Int, + b40: Int, + b41: Int, + b42: Int, + b43: Int, + b44: Int, + b45: Int, + b46: Int, + b47: Int, + b48: Int, + b49: Int, + b50: Int, + b51: Int, + b52: Int, + b53: Int, + b54: Int, + b55: Int, + b56: Int, + b57: Int, + b58: Int, + b59: Int, + b60: Int, + b61: Int, + b62: Int, + b63: Int, + 
b64: Int, + b65: Int, + b66: Int, + b67: Int, + b68: Int, + b69: Int, + b70: Int, + b71: Int, + b72: Int, + b73: Int, + b74: Int, + b75: Int, + b76: Int, + b77: Int, + b78: Int, + b79: Int, + b80: Int, + b81: Int, + b82: Int, + b83: Int, + b84: Int, + b85: Int, + b86: Int, + b87: Int, + b88: Int, + b89: Int, + b90: Int, + b91: Int, + b92: Int, + b93: Int, + b94: Int, + b95: Int, + b96: Int, + b97: Int, + b98: Int, + b99: Int, + b100: Int, + b101: Int, + b102: Int, + b103: Int, + b104: Int, + b105: Int, + b106: Int, + b107: Int, + b108: Int, + b109: Int, + b110: Int, + b111: Int, + b112: Int, + b113: Int, + b114: Int, + b115: Int, + b116: Int, + b117: Int, + b118: Int, + b119: Int, + b120: Int, + b121: Int, + b122: Int, + b123: Int, + b124: Int, + b125: Int, + b126: Int, + b127: Int, + b128: Int, + b129: Int, + b130: Int, + b131: Int, + b132: Int, + b133: Int, + b134: Int, + b135: Int, + b136: Int, + b137: Int, + b138: Int, + b139: Int, + b140: Int, + b141: Int, + b142: Int, + b143: Int, + b144: Int, + b145: Int, + b146: Int, + b147: Int, + b148: Int, + b149: Int, + b150: Int, + b151: Int, + b152: Int, + b153: Int, + b154: Int, + b155: Int, + b156: Int, + b157: Int, + b158: Int, + b159: Int, + b160: Int, + b161: Int, + b162: Int, + b163: Int, + b164: Int, + b165: Int, + b166: Int, + b167: Int, + b168: Int, + b169: Int, + b170: Int, + b171: Int, + b172: Int, + b173: Int, + b174: Int, + b175: Int, + b176: Int, + b177: Int, + b178: Int, + b179: Int, + b180: Int, + b181: Int, + b182: Int, + b183: Int, + b184: Int, + b185: Int, + b186: Int, + b187: Int, + b188: Int, + b189: Int, + b190: Int, + b191: Int, + b192: Int, + b193: Int, + b194: Int, + b195: Int, + b196: Int, + b197: Int, + b198: Int, + b199: Int +) extends SimpleTable.Nested +object LargeObjectB extends SimpleTable[LargeObjectB] + +// this compiles with the full 200 fields but is too slow, +// so hopefully 75 is large enough. 
(works with -Xss900k) +def foo: Unit = { + import H2Dialect.* + def exprOf(i: Int): Expr[Int] = i + val db: DbApi = ??? + val m = db.run( + LargeObjectB.select.map(_ => + ( + b0 = exprOf(0), + b1 = exprOf(1), + b2 = exprOf(2), + b3 = exprOf(3), + b4 = exprOf(4), + b5 = exprOf(5), + b6 = exprOf(6), + b7 = exprOf(7), + b8 = exprOf(8), + b9 = exprOf(9), + b10 = exprOf(10), + b11 = exprOf(11), + b12 = exprOf(12), + b13 = exprOf(13), + b14 = exprOf(14), + b15 = exprOf(15), + b16 = exprOf(16), + b17 = exprOf(17), + b18 = exprOf(18), + b19 = exprOf(19), + b20 = exprOf(20), + b21 = exprOf(21), + b22 = exprOf(22), + b23 = exprOf(23), + b24 = exprOf(24), + b25 = exprOf(25), + b26 = exprOf(26), + b27 = exprOf(27), + b28 = exprOf(28), + b29 = exprOf(29), + b30 = exprOf(30), + b31 = exprOf(31), + b32 = exprOf(32), + b33 = exprOf(33), + b34 = exprOf(34), + b35 = exprOf(35), + b36 = exprOf(36), + b37 = exprOf(37), + b38 = exprOf(38), + b39 = exprOf(39), + b40 = exprOf(40), + b41 = exprOf(41), + b42 = exprOf(42), + b43 = exprOf(43), + b44 = exprOf(44), + b45 = exprOf(45), + b46 = exprOf(46), + b47 = exprOf(47), + b48 = exprOf(48), + b49 = exprOf(49), + b50 = exprOf(50), + b51 = exprOf(51), + b52 = exprOf(52), + b53 = exprOf(53), + b54 = exprOf(54), + b55 = exprOf(55), + b56 = exprOf(56), + b57 = exprOf(57), + b58 = exprOf(58), + b59 = exprOf(59), + b60 = exprOf(60), + b61 = exprOf(61), + b62 = exprOf(62), + b63 = exprOf(63), + b64 = exprOf(64), + b65 = exprOf(65), + b66 = exprOf(66), + b67 = exprOf(67), + b68 = exprOf(68), + b69 = exprOf(69), + b70 = exprOf(70), + b71 = exprOf(71), + b72 = exprOf(72), + b73 = exprOf(73), + b74 = exprOf(74) + // b75 = exprOf(75), + // b76 = exprOf(76), + // b77 = exprOf(77), + // b78 = exprOf(78), + // b79 = exprOf(79), + // b80 = exprOf(80), + // b81 = exprOf(81), + // b82 = exprOf(82), + // b83 = exprOf(83), + // b84 = exprOf(84), + // b85 = exprOf(85), + // b86 = exprOf(86), + // b87 = exprOf(87), + // b88 = exprOf(88), + // b89 = exprOf(89), + 
// b90 = exprOf(90), + // b91 = exprOf(91), + // b92 = exprOf(92), + // b93 = exprOf(93), + // b94 = exprOf(94), + // b95 = exprOf(95), + // b96 = exprOf(96), + // b97 = exprOf(97), + // b98 = exprOf(98), + // b99 = exprOf(99), + // b100 = exprOf(100), + // b101 = exprOf(101), + // b102 = exprOf(102), + // b103 = exprOf(103), + // b104 = exprOf(104), + // b105 = exprOf(105), + // b106 = exprOf(106), + // b107 = exprOf(107), + // b108 = exprOf(108), + // b109 = exprOf(109), + // b110 = exprOf(110), + // b111 = exprOf(111), + // b112 = exprOf(112), + // b113 = exprOf(113), + // b114 = exprOf(114), + // b115 = exprOf(115), + // b116 = exprOf(116), + // b117 = exprOf(117), + // b118 = exprOf(118), + // b119 = exprOf(119), + // b120 = exprOf(120), + // b121 = exprOf(121), + // b122 = exprOf(122), + // b123 = exprOf(123), + // b124 = exprOf(124), + // b125 = exprOf(125), + // b126 = exprOf(126), + // b127 = exprOf(127), + // b128 = exprOf(128), + // b129 = exprOf(129), + // b130 = exprOf(130), + // b131 = exprOf(131), + // b132 = exprOf(132), + // b133 = exprOf(133), + // b134 = exprOf(134), + // b135 = exprOf(135), + // b136 = exprOf(136), + // b137 = exprOf(137), + // b138 = exprOf(138), + // b139 = exprOf(139), + // b140 = exprOf(140), + // b141 = exprOf(141), + // b142 = exprOf(142), + // b143 = exprOf(143), + // b144 = exprOf(144), + // b145 = exprOf(145), + // b146 = exprOf(146), + // b147 = exprOf(147), + // b148 = exprOf(148), + // b149 = exprOf(149), + // b150 = exprOf(150), + // b151 = exprOf(151), + // b152 = exprOf(152), + // b153 = exprOf(153), + // b154 = exprOf(154), + // b155 = exprOf(155), + // b156 = exprOf(156), + // b157 = exprOf(157), + // b158 = exprOf(158), + // b159 = exprOf(159), + // b160 = exprOf(160), + // b161 = exprOf(161), + // b162 = exprOf(162), + // b163 = exprOf(163), + // b164 = exprOf(164), + // b165 = exprOf(165), + // b166 = exprOf(166), + // b167 = exprOf(167), + // b168 = exprOf(168), + // b169 = exprOf(169), + // b170 = 
exprOf(170), + // b171 = exprOf(171), + // b172 = exprOf(172), + // b173 = exprOf(173), + // b174 = exprOf(174), + // b175 = exprOf(175), + // b176 = exprOf(176), + // b177 = exprOf(177), + // b178 = exprOf(178), + // b179 = exprOf(179), + // b180 = exprOf(180), + // b181 = exprOf(181), + // b182 = exprOf(182), + // b183 = exprOf(183), + // b184 = exprOf(184), + // b185 = exprOf(185), + // b186 = exprOf(186), + // b187 = exprOf(187), + // b188 = exprOf(188), + // b189 = exprOf(189), + // b190 = exprOf(190), + // b191 = exprOf(191), + // b192 = exprOf(192), + // b193 = exprOf(193), + // b194 = exprOf(194), + // b195 = exprOf(195), + // b196 = exprOf(196), + // b197 = exprOf(197), + // b198 = exprOf(198), + // b199 = exprOf(199) + ) + ) + ) + val _: Seq[NamedTuple.Take[NamedTuple.From[LargeObjectB], 75]] = m +} diff --git a/scalasql/namedtuples/test/src/datatypes/SimpleTableDataTypesTests.scala b/scalasql/namedtuples/test/src/datatypes/SimpleTableDataTypesTests.scala new file mode 100644 index 00000000..1e94ab19 --- /dev/null +++ b/scalasql/namedtuples/test/src/datatypes/SimpleTableDataTypesTests.scala @@ -0,0 +1,240 @@ +package scalasql.namedtuples.datatypes + +import scalasql.simple.{*, given} +import scalasql.utils.ScalaSqlSuite + +import sourcecode.Text +import utest._ + +import java.time.{ + Instant, + LocalDate, + LocalDateTime, + LocalTime, + OffsetDateTime, + ZoneId, + ZonedDateTime +} +import java.util.Date +import java.text.SimpleDateFormat + +import _root_.test.scalasql.WorldSqlTests.ArrowAssert + +case class Nested( + fooId: Int, + myBoolean: Boolean +) extends SimpleTable.Nested +object Nested extends SimpleTable[Nested] + +case class Enclosing( + barId: Int, + myString: String, + foo: Nested +) +object Enclosing extends SimpleTable[Enclosing] + +trait SimpleTableDataTypesTests extends ScalaSqlSuite { + def description = + "Basic operations on all the data types that ScalaSql supports " + + "mapping between Database types and Scala types." 
+ def tests = Tests { + test("constant") - checker.recorded( + """ + This example demonstrates a range of different data types being written + and read back via ScalaSQL + """, + Text { + object MyEnum extends Enumeration { + val foo, bar, baz = Value + + implicit def make: String => Value = withName + } + case class DataTypes( + myTinyInt: Byte, + mySmallInt: Short, + myInt: Int, + myBigInt: Long, + myDouble: Double, + myBoolean: Boolean, + myLocalDate: LocalDate, + myLocalTime: LocalTime, + myLocalDateTime: LocalDateTime, + myUtilDate: Date, + myInstant: Instant, + myVarBinary: geny.Bytes, + myUUID: java.util.UUID, + myEnum: MyEnum.Value + ) + + object DataTypes extends SimpleTable[DataTypes] + + val value = DataTypes( + myTinyInt = 123.toByte, + mySmallInt = 12345.toShort, + myInt = 12345678, + myBigInt = 12345678901L, + myDouble = 3.14, + myBoolean = true, + myLocalDate = LocalDate.parse("2023-12-20"), + myLocalTime = LocalTime.parse("10:15:30"), + myLocalDateTime = LocalDateTime.parse("2011-12-03T10:15:30"), + myUtilDate = + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS").parse("2011-12-03T10:15:30.000"), + myInstant = Instant.parse("2011-12-03T10:15:30Z"), + myVarBinary = new geny.Bytes(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)), + myUUID = new java.util.UUID(1234567890L, 9876543210L), + myEnum = MyEnum.bar + ) + + db.run( + DataTypes.insert.columns( + _.myTinyInt := value.myTinyInt, + _.mySmallInt := value.mySmallInt, + _.myInt := value.myInt, + _.myBigInt := value.myBigInt, + _.myDouble := value.myDouble, + _.myBoolean := value.myBoolean, + _.myLocalDate := value.myLocalDate, + _.myLocalTime := value.myLocalTime, + _.myLocalDateTime := value.myLocalDateTime, + _.myUtilDate := value.myUtilDate, + _.myInstant := value.myInstant, + _.myVarBinary := value.myVarBinary, + _.myUUID := value.myUUID, + _.myEnum := value.myEnum + ) + ) ==> 1 + + db.run(DataTypes.select) ==> Seq(value) + } + ) + + test("nonRoundTrip") - checker.recorded( + """ + In general, databases do 
not store timezones and offsets together with their timestamps: + "TIMESTAMP WITH TIMEZONE" is a lie and it actually stores UTC and renders to whatever + timezone the client queries it from. Thus values of type `OffsetDateTime` can preserve + their instant, but cannot be round-tripped preserving the offset. + """, + Text { + + case class NonRoundTripTypes( + myZonedDateTime: ZonedDateTime, + myOffsetDateTime: OffsetDateTime + ) + + object NonRoundTripTypes extends SimpleTable[NonRoundTripTypes] + + val value = NonRoundTripTypes( + myZonedDateTime = ZonedDateTime.parse("2011-12-03T10:15:30+01:00[Europe/Paris]"), + myOffsetDateTime = OffsetDateTime.parse("2011-12-03T10:15:30+00:00") + ) + + def normalize(v: NonRoundTripTypes) = v.copy( + myZonedDateTime = v.myZonedDateTime.withZoneSameInstant(ZoneId.systemDefault), + myOffsetDateTime = v.myOffsetDateTime.withOffsetSameInstant(OffsetDateTime.now.getOffset) + ) + + db.run( + NonRoundTripTypes.insert.columns( + _.myOffsetDateTime := value.myOffsetDateTime, + _.myZonedDateTime := value.myZonedDateTime + ) + ) ==> 1 + + db.run(NonRoundTripTypes.select).map(normalize) ==> Seq(normalize(value)) + } + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("enclosing - with SimpleTable") - checker.recorded( + """ + You can nest `case class`es in other `case class`es to DRY up common sets of + table columns. These nested `case class`es have their columns flattened out + into the enclosing `case class`'s columns, such that at the SQL level it is + all flattened out without nesting. + + **Important**: When using nested `case class`es with `SimpleTable`, + make sure to extend `SimpleTable.Nested` in the nested class. 
+ """, + Text { + // case class Nested( + // fooId: Int, + // myBoolean: Boolean, + // ) extends SimpleTable.Nested + // object Nested extends SimpleTable[Nested] + // + // case class Enclosing( + // barId: Int, + // myString: String, + // foo: Nested + // ) + // object Enclosing extends SimpleTable[Enclosing] + val value1 = Enclosing( + barId = 1337, + myString = "hello", + foo = Nested( + fooId = 271828, + myBoolean = true + ) + ) + val value2 = Enclosing( + barId = 31337, + myString = "world", + foo = Nested( + fooId = 1618, + myBoolean = false + ) + ) + + val insertColumns = Enclosing.insert.columns( + _.barId := value1.barId, + _.myString := value1.myString, + _.foo.fooId := value1.foo.fooId, + _.foo.myBoolean := value1.foo.myBoolean + ) + db.renderSql(insertColumns) ==> + "INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean) VALUES (?, ?, ?, ?)" + + db.run(insertColumns) ==> 1 + + val insertValues = Enclosing.insert.values(value2) + db.renderSql(insertValues) ==> + "INSERT INTO enclosing (bar_id, my_string, foo_id, my_boolean) VALUES (?, ?, ?, ?)" + + db.run(insertValues) ==> 1 + + db.renderSql(Enclosing.select) ==> """ + SELECT + enclosing0.bar_id AS bar_id, + enclosing0.my_string AS my_string, + enclosing0.foo_id AS foo_id, + enclosing0.my_boolean AS my_boolean + FROM enclosing enclosing0 + """ + + db.run(Enclosing.select) ==> Seq(value1, value2) + + } + ) + test("JoinNullable proper type mapping") - checker.recorded( + "", + Text { + case class A(id: Int, bId: Option[Int]) + object A extends SimpleTable[A] + + object Custom extends Enumeration { + val Foo, Bar = Value + + implicit def make: String => Value = withName + } + + case class B(id: Int, custom: Custom.Value) + object B extends SimpleTable[B] + db.run(A.insert.columns(_.id := 1, _.bId := None)) + val result = db.run(A.select.leftJoin(B)(_.id === _.id).single) + result._2 ==> None + } + ) + } +} diff --git a/scalasql/namedtuples/test/src/datatypes/SimpleTableOptionalTests.scala 
b/scalasql/namedtuples/test/src/datatypes/SimpleTableOptionalTests.scala new file mode 100644 index 00000000..19701594 --- /dev/null +++ b/scalasql/namedtuples/test/src/datatypes/SimpleTableOptionalTests.scala @@ -0,0 +1,616 @@ +package scalasql.namedtuples.datatypes + +import scalasql.simple.{*, given} +import scalasql.utils.ScalaSqlSuite + +import utest._ +import sourcecode.Text + +import java.time.{ + Instant, + LocalDate, + LocalDateTime, + LocalTime, + OffsetDateTime, + ZoneId, + ZonedDateTime +} +import java.util.Date +import java.text.SimpleDateFormat +import java.util.UUID + +case class OptCols(myInt: Option[Int], myInt2: Option[Int]) + +object OptCols extends SimpleTable[OptCols] + +trait SimpleTableOptionalTests extends ScalaSqlSuite { + def description = + "Queries using columns that may be `NULL`, `Expr[Option[T]]` or `Option[T]` in Scala" + override def utestBeforeEach(path: Seq[String]): Unit = checker.reset() + def tests = Tests { + + checker( + query = Text { + OptCols.insert.batched(_.myInt, _.myInt2)( + (None, None), + (Some(1), Some(2)), + (Some(3), None), + (None, Some(4)) + ) + }, + value = 4 + )(using implicitly, utest.framework.TestPath(Nil)) + + test("selectAll") - checker( + query = Text { OptCols.select }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(None, Some(4)) + ), + docs = """ + Nullable columns are modelled as `Option[V]` fields on your `case class`, + and are returned to you as `Option[V]` values when you run a query. 
These + can be `Some` or `None` + """ + ) + + test("groupByMaxGet") - checker( + query = Text { OptCols.select.groupBy(_.myInt)(_.maxByOpt(_.myInt2.get)) }, + sql = """ + SELECT opt_cols0.my_int AS res_0, MAX(opt_cols0.my_int2) AS res_1 + FROM opt_cols opt_cols0 + GROUP BY opt_cols0.my_int + """, + value = Seq(None -> Some(4), Some(1) -> Some(2), Some(3) -> None), + normalize = (x: Seq[(Option[Int], Option[Int])]) => x.sorted, + docs = """ + Some aggregates return `Expr[Option[V]]`s, et.c. `.maxByOpt` + """ + ) + + test("isDefined") - checker( + query = Text { OptCols.select.filter(_.myInt.isDefined) }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS NOT NULL)""", + value = Seq(OptCols(Some(1), Some(2)), OptCols(Some(3), None)), + docs = """ + `.isDefined` on `Expr[Option[V]]` translates to a SQL + `IS NOT NULL` check + """ + ) + + test("isEmpty") - checker( + query = Text { OptCols.select.filter(_.myInt.isEmpty) }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS NULL)""", + value = Seq(OptCols(None, None), OptCols(None, Some(4))), + docs = """ + `.isEmpty` on `Expr[Option[V]]` translates to a SQL + `IS NULL` check + """ + ) + + test("sqlEquals") { + test("nonOptionHit") - checker( + query = Text { OptCols.select.filter(_.myInt `=` 1) }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int = ?) + """, + value = Seq(OptCols(Some(1), Some(2))), + docs = """ + Backticked `=` equality in ScalaSQL translates to a raw `=` + in SQL. 
This follows SQL `NULL` semantics, meaning that + `None = None` returns `false` rather than `true` + """ + ) + + test("nonOptionMiss") - checker( + query = Text { OptCols.select.filter(_.myInt `=` 2) }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int = ?) + """, + value = Seq[OptCols]() + ) + + test("optionMiss") - checker( // SQL null = null is false + query = Text { OptCols.select.filter(_.myInt `=` Option.empty[Int]) }, + sql = """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int = ?) + """, + value = Seq[OptCols]() + ) + } + test("scalaEquals") { + test("someHit") - checker( + query = Text { OptCols.select.filter(_.myInt === Option(1)) }, + sqls = Seq( + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS NOT DISTINCT FROM ?) + """, + // MySQL syntax + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int <=> ?) + """ + ), + value = Seq(OptCols(Some(1), Some(2))), + docs = """ + `===` equality in ScalaSQL translates to a `IS NOT DISTINCT` in SQL. + This roughly follows Scala `==` semantics, meaning `None === None` + returns `true` + """ + ) + + test("noneHit") - checker( + query = Text { OptCols.select.filter(_.myInt === Option.empty[Int]) }, + sqls = Seq( + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS NOT DISTINCT FROM ?) + """, + // MySQL syntax + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int <=> ?) 
+ """ + ), + value = Seq(OptCols(None, None), OptCols(None, Some(4))) + ) + + test("notEqualsSome") - checker( + query = Text { + OptCols.select.filter(_.myInt !== Option(1)) + }, + sqls = Seq( + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS DISTINCT FROM ?) + """, + // MySQL syntax + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (NOT (opt_cols0.my_int <=> ?)) + """ + ), + value = Seq( + OptCols(None, None), + OptCols(Some(3), None), + OptCols(None, Some(value = 4)) + ) + ) + + test("notEqualsNone") - checker( + query = Text { + OptCols.select.filter(_.myInt !== Option.empty[Int]) + }, + sqls = Seq( + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (opt_cols0.my_int IS DISTINCT FROM ?) + """, + // MySQL syntax + """ + SELECT + opt_cols0.my_int AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + WHERE (NOT (opt_cols0.my_int <=> ?)) + """ + ), + value = Seq( + OptCols(Some(1), Some(2)), + OptCols(Some(3), None) + ) + ) + + } + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("map - with SimpleTable") - checker( + query = Text { OptCols.select.map(d => d.updates(_.myInt(_.map(_ + 10)))) }, + sql = """ + SELECT + (opt_cols0.my_int + ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(11), Some(2)), + OptCols(Some(13), None), + OptCols(None, Some(4)) + ), + docs = """ + You can use operators like `.map` and `.flatMap` to work with + your `Expr[Option[V]]` values. These roughly follow the semantics + that you would be familiar with from Scala. + """ + ) + + test("map2") - checker( + query = Text { OptCols.select.map(_.myInt.map(_ + 10)) }, + sql = "SELECT (opt_cols0.my_int + ?) 
AS res FROM opt_cols opt_cols0", + value = Seq(None, Some(11), Some(13), None) + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("flatMap - with SimpleTable") - checker( + query = Text { + OptCols.select + .map(d => d.updates(_.myInt(_.flatMap(v => d.myInt2.map(v2 => v + v2 + 10))))) + }, + sql = """ + SELECT + ((opt_cols0.my_int + opt_cols0.my_int2) + ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(13), Some(2)), + // because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too + OptCols(None, None), + OptCols(None, Some(4)) + ) + ) + + test("mapGet") - checker( + query = Text { + OptCols.select.map(d => d.updates(_.myInt(_.map(_ + d.myInt2.get + 1)))) + }, + sql = """ + SELECT + ((opt_cols0.my_int + opt_cols0.my_int2) + ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(4), Some(2)), + // because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too + OptCols(None, None), + OptCols(None, Some(4)) + ), + docs = """ + You can use `.get` to turn an `Expr[Option[V]]` into an `Expr[V]`. This follows + SQL semantics, such that `NULL`s anywhere in that selected column automatically + will turn the whole column `None` (if it's an `Expr[Option[V]]` column) or `null` + (if it's not an optional column) + """ + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("rawGet - with SimpleTable") - checker( + query = Text { + OptCols.select.map(d => d.updates(_.myInt := d.myInt.get + d.myInt2.get + 1)) + }, + sql = """ + SELECT + ((opt_cols0.my_int + opt_cols0.my_int2) + ?) 
AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(4), Some(2)), + // because my_int2 is added to my_int, and my_int2 is null, my_int becomes null too + OptCols(None, None), + OptCols(None, Some(4)) + ) + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("getOrElse - with SimpleTable") - checker( + query = Text { OptCols.select.map(d => d.updates(_.myInt(_.getOrElse(-1)))) }, + sql = """ + SELECT + COALESCE(opt_cols0.my_int, ?) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(Some(-1), None), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(Some(-1), Some(4)) + ) + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("orElse - with SimpleTable") - checker( + query = Text { OptCols.select.map(d => d.updates(_.myInt(_.orElse(d.myInt2)))) }, + sql = """ + SELECT + COALESCE(opt_cols0.my_int, opt_cols0.my_int2) AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(Some(4), Some(4)) + ) + ) + + // !! Important: '- with SimpleTable' so it will be detected by generateDocs.mill + test("filter - with SimpleTable") - checker( + query = Text { OptCols.select.map(d => d.updates(_.myInt(_.filter(_ < 2)))) }, + sql = """ + SELECT + CASE + WHEN (opt_cols0.my_int < ?) 
THEN opt_cols0.my_int + ELSE NULL + END AS my_int, + opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + """, + value = Seq( + OptCols(None, None), + OptCols(Some(1), Some(2)), + OptCols(None, None), + OptCols(None, Some(4)) + ), + docs = """ + `.filter` follows normal Scala semantics, and translates to a `CASE`/`WHEN (foo)`/`ELSE NULL` + """ + ) + test("sorting") { + test("nullsLast") - checker( + query = Text { OptCols.select.sortBy(_.myInt).nullsLast }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int NULLS LAST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int IS NULL ASC, my_int + """ + ), + value = Seq( + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + OptCols(None, None), + OptCols(None, Some(4)) + ), + docs = """ + `.nullsLast` and `.nullsFirst` translate to SQL `NULLS LAST` and `NULLS FIRST` clauses + """ + ) + test("nullsFirst") - checker( + query = Text { OptCols.select.sortBy(_.myInt).nullsFirst }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int NULLS FIRST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int IS NULL DESC, my_int + """ + ), + value = Seq( + OptCols(None, None), + OptCols(None, Some(4)), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None) + ) + ) + test("ascNullsLast") - checker( + query = Text { OptCols.select.sortBy(_.myInt).asc.nullsLast }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int ASC NULLS LAST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int IS NULL ASC, my_int ASC + """ + ), + value = Seq( + OptCols(Some(1), Some(2)), + OptCols(Some(3), None), + 
OptCols(None, None), + OptCols(None, Some(4)) + ) + ) + test("ascNullsFirst") - checker( + query = Text { OptCols.select.sortBy(_.myInt).asc.nullsFirst }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int ASC NULLS FIRST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int ASC + """ + ), + value = Seq( + OptCols(None, None), + OptCols(None, Some(4)), + OptCols(Some(1), Some(2)), + OptCols(Some(3), None) + ) + ) + test("descNullsLast") - checker( + query = Text { OptCols.select.sortBy(_.myInt).desc.nullsLast }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int DESC NULLS LAST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int DESC + """ + ), + value = Seq( + OptCols(Some(3), None), + OptCols(Some(1), Some(2)), + OptCols(None, None), + OptCols(None, Some(4)) + ) + ) + test("descNullsFirst") - checker( + query = Text { OptCols.select.sortBy(_.myInt).desc.nullsFirst }, + sqls = Seq( + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int DESC NULLS FIRST + """, + """ + SELECT opt_cols0.my_int AS my_int, opt_cols0.my_int2 AS my_int2 + FROM opt_cols opt_cols0 + ORDER BY my_int IS NULL DESC, my_int DESC + """ + ), + value = Seq( + OptCols(None, None), + OptCols(None, Some(4)), + OptCols(Some(3), None), + OptCols(Some(1), Some(2)) + ) + ) + test("roundTripOptionalValues") - checker.recorded( + """ + This example demonstrates a range of different data types being written + as options, both with Some(v) and None values + """, + Text { + object MyEnum extends Enumeration { + val foo, bar, baz = Value + + implicit def make: String => Value = withName + } + case class OptDataTypes( + myTinyInt: Option[Byte], + mySmallInt: 
Option[Short], + myInt: Option[Int], + myBigInt: Option[Long], + myDouble: Option[Double], + myBoolean: Option[Boolean], + myLocalDate: Option[LocalDate], + myLocalTime: Option[LocalTime], + myLocalDateTime: Option[LocalDateTime], + myUtilDate: Option[Date], + myInstant: Option[Instant], + myVarBinary: Option[geny.Bytes], + myUUID: Option[java.util.UUID], + myEnum: Option[MyEnum.Value] + ) + + object OptDataTypes extends SimpleTable[OptDataTypes] { + override def tableName: String = "data_types" + } + + val rowSome = OptDataTypes( + myTinyInt = Some(123.toByte), + mySmallInt = Some(12345.toShort), + myInt = Some(12345678), + myBigInt = Some(12345678901L), + myDouble = Some(3.14), + myBoolean = Some(true), + myLocalDate = Some(LocalDate.parse("2023-12-20")), + myLocalTime = Some(LocalTime.parse("10:15:30")), + myLocalDateTime = Some(LocalDateTime.parse("2011-12-03T10:15:30")), + myUtilDate = Some( + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS").parse("2011-12-03T10:15:30.000") + ), + myInstant = Some(Instant.parse("2011-12-03T10:15:30Z")), + myVarBinary = Some(new geny.Bytes(Array[Byte](1, 2, 3, 4, 5, 6, 7, 8))), + myUUID = Some(new java.util.UUID(1234567890L, 9876543210L)), + myEnum = Some(MyEnum.bar) + ) + + val rowNone = OptDataTypes( + myTinyInt = None, + mySmallInt = None, + myInt = None, + myBigInt = None, + myDouble = None, + myBoolean = None, + myLocalDate = None, + myLocalTime = None, + myLocalDateTime = None, + myUtilDate = None, + myInstant = None, + myVarBinary = None, + myUUID = None, + myEnum = None + ) + + db.run( + OptDataTypes.insert.values(rowSome, rowNone) + ) ==> 2 + + db.run(OptDataTypes.select) ==> Seq(rowSome, rowNone) + } + ) + + } + } +} diff --git a/scalasql/namedtuples/test/src/example/SimpleTableH2Example.scala b/scalasql/namedtuples/test/src/example/SimpleTableH2Example.scala new file mode 100644 index 00000000..178a2219 --- /dev/null +++ b/scalasql/namedtuples/test/src/example/SimpleTableH2Example.scala @@ -0,0 +1,82 @@ +// 
duplicated from scalasql/test/src/example/H2Example.scala +package scalasql.namedtuples.example + +import scalasql.simple.{*, given} +import H2Dialect.* + +object SimpleTableH2Example { + + case class ExampleProduct( + id: Int, + kebabCaseName: String, + name: String, + price: Double + ) + + object ExampleProduct extends SimpleTable[ExampleProduct] + + // The example H2 database comes from the library `com.h2database:h2:2.2.224` + val dataSource = new org.h2.jdbcx.JdbcDataSource + dataSource.setUrl("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1") + lazy val h2Client = new DbClient.DataSource( + dataSource, + config = new {} + ) + + def main(args: Array[String]): Unit = { + h2Client.transaction { db => + db.updateRaw(""" + CREATE TABLE example_product ( + id INTEGER AUTO_INCREMENT PRIMARY KEY, + kebab_case_name VARCHAR(256), + name VARCHAR(256), + price DECIMAL(20, 2) + ); + """) + + val inserted = db.run( + ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( + ("face-mask", "Face Mask", 8.88), + ("guitar", "Guitar", 300), + ("socks", "Socks", 3.14), + ("skate-board", "Skate Board", 123.45), + ("camera", "Camera", 1000.00), + ("cookie", "Cookie", 0.10) + ) + ) + + assert(inserted == 6) + + val result = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result == Seq("Camera", "Guitar", "Skate Board")) + + db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) + + db.run(ExampleProduct.delete(_.name === "Guitar")) + + val result2 = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result2 == Seq("Camera", "Skate Board", "Cookie")) + + val result3 = + db.run( + ExampleProduct.select + .filter(_.price > 10) + .sortBy(_.price) + .desc + .map(p => (name = p.name, price = p.price)) + ) + + assert( + result3 == Seq( + (name = "Camera", price = 1000.00), + (name = "Skate Board", price = 123.45), + (name = "Cookie", price = 11.0) + ) + ) + } + } +} diff --git 
a/scalasql/namedtuples/test/src/example/SimpleTableMySqlExample.scala b/scalasql/namedtuples/test/src/example/SimpleTableMySqlExample.scala new file mode 100644 index 00000000..0f493a1b --- /dev/null +++ b/scalasql/namedtuples/test/src/example/SimpleTableMySqlExample.scala @@ -0,0 +1,96 @@ +// duplicated from scalasql/test/src/example/MySqlExample.scala +package scalasql.namedtuples.example + +import org.testcontainers.containers.MySQLContainer + +import scalasql.simple.{*, given} +import MySqlDialect.* + +object SimpleTableMySqlExample { + + case class ExampleProduct( + id: Int, + kebabCaseName: String, + name: String, + price: Double + ) + + object ExampleProduct extends SimpleTable[ExampleProduct] + + // The example MySQLContainer comes from the library `org.testcontainers:mysql:1.19.1` + lazy val mysql = { + println("Initializing MySql") + val mysql = new MySQLContainer("mysql:8.0.31") + mysql.setCommand("mysqld", "--character-set-server=utf8mb4", "--collation-server=utf8mb4_bin") + mysql.start() + mysql + } + + val dataSource = new com.mysql.cj.jdbc.MysqlDataSource + dataSource.setURL(mysql.getJdbcUrl + "?allowMultiQueries=true") + dataSource.setDatabaseName(mysql.getDatabaseName); + dataSource.setUser(mysql.getUsername); + dataSource.setPassword(mysql.getPassword); + + lazy val mysqlClient = new DbClient.DataSource( + dataSource, + config = new {} + ) + + def main(args: Array[String]): Unit = { + mysqlClient.transaction { db => + db.updateRaw(""" + CREATE TABLE example_product ( + id INTEGER PRIMARY KEY AUTO_INCREMENT, + kebab_case_name VARCHAR(256), + name VARCHAR(256), + price DECIMAL(20, 2) + ); + """) + + val inserted = db.run( + ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( + ("face-mask", "Face Mask", 8.88), + ("guitar", "Guitar", 300), + ("socks", "Socks", 3.14), + ("skate-board", "Skate Board", 123.45), + ("camera", "Camera", 1000.00), + ("cookie", "Cookie", 0.10) + ) + ) + + assert(inserted == 6) + + val result = + 
db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result == Seq("Camera", "Guitar", "Skate Board")) + + db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) + + db.run(ExampleProduct.delete(_.name === "Guitar")) + + val result2 = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result2 == Seq("Camera", "Skate Board", "Cookie")) + + val result3 = + db.run( + ExampleProduct.select + .filter(_.price > 10) + .sortBy(_.price) + .desc + .map(p => (name = p.name, price = p.price)) + ) + + assert( + result3 == Seq( + (name = "Camera", price = 1000.00), + (name = "Skate Board", price = 123.45), + (name = "Cookie", price = 11.0) + ) + ) + } + } +} diff --git a/scalasql/namedtuples/test/src/example/SimpleTablePostgresExample.scala b/scalasql/namedtuples/test/src/example/SimpleTablePostgresExample.scala new file mode 100644 index 00000000..aebcca41 --- /dev/null +++ b/scalasql/namedtuples/test/src/example/SimpleTablePostgresExample.scala @@ -0,0 +1,95 @@ +// duplicated from scalasql/test/src/example/PostgresExample.scala +package scalasql.namedtuples.example + +import org.testcontainers.containers.PostgreSQLContainer + +import scalasql.simple.{*, given} +import PostgresDialect.* + +object SimpleTablePostgresExample { + + case class ExampleProduct( + id: Int, + kebabCaseName: String, + name: String, + price: Double + ) + + object ExampleProduct extends SimpleTable[ExampleProduct] + + // The example PostgreSQLContainer comes from the library `org.testcontainers:postgresql:1.19.1` + lazy val postgres = { + println("Initializing Postgres") + val pg = new PostgreSQLContainer("postgres:15-alpine") + pg.start() + pg + } + + val dataSource = new org.postgresql.ds.PGSimpleDataSource + dataSource.setURL(postgres.getJdbcUrl) + dataSource.setDatabaseName(postgres.getDatabaseName); + dataSource.setUser(postgres.getUsername); + dataSource.setPassword(postgres.getPassword); + + 
lazy val postgresClient = new DbClient.DataSource( + dataSource, + config = new {} + ) + + def main(args: Array[String]): Unit = { + postgresClient.transaction { db => + db.updateRaw(""" + CREATE TABLE example_product ( + id SERIAL PRIMARY KEY, + kebab_case_name VARCHAR(256), + name VARCHAR(256), + price DECIMAL(20, 2) + ); + """) + + val inserted = db.run( + ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( + ("face-mask", "Face Mask", 8.88), + ("guitar", "Guitar", 300), + ("socks", "Socks", 3.14), + ("skate-board", "Skate Board", 123.45), + ("camera", "Camera", 1000.00), + ("cookie", "Cookie", 0.10) + ) + ) + + assert(inserted == 6) + + val result = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result == Seq("Camera", "Guitar", "Skate Board")) + + db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) + + db.run(ExampleProduct.delete(_.name === "Guitar")) + + val result2 = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result2 == Seq("Camera", "Skate Board", "Cookie")) + + val result3 = + db.run( + ExampleProduct.select + .filter(_.price > 10) + .sortBy(_.price) + .desc + .map(p => (name = p.name, price = p.price)) + ) + + assert( + result3 == Seq( + (name = "Camera", price = 1000.00), + (name = "Skate Board", price = 123.45), + (name = "Cookie", price = 11.0) + ) + ) + } + } +} diff --git a/scalasql/namedtuples/test/src/example/SimpleTableSqliteExample.scala b/scalasql/namedtuples/test/src/example/SimpleTableSqliteExample.scala new file mode 100644 index 00000000..345170ec --- /dev/null +++ b/scalasql/namedtuples/test/src/example/SimpleTableSqliteExample.scala @@ -0,0 +1,83 @@ +// duplicated from scalasql/test/src/example/SqliteExample.scala +package scalasql.namedtuples.example + +import scalasql.simple.{*, given} +import SqliteDialect.* + +object SimpleTableSqliteExample { + + case class ExampleProduct( + id: Int, + 
kebabCaseName: String, + name: String, + price: Double + ) + + object ExampleProduct extends SimpleTable[ExampleProduct] + + // The example Sqlite JDBC client comes from the library `org.xerial:sqlite-jdbc:3.43.0.0` + val dataSource = new org.sqlite.SQLiteDataSource() + val tmpDb = java.nio.file.Files.createTempDirectory("sqlite") + dataSource.setUrl(s"jdbc:sqlite:$tmpDb/file.db") + lazy val sqliteClient = new DbClient.DataSource( + dataSource, + config = new {} + ) + + def main(args: Array[String]): Unit = { + sqliteClient.transaction { db => + db.updateRaw(""" + CREATE TABLE example_product ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + kebab_case_name VARCHAR(256), + name VARCHAR(256), + price DECIMAL(20, 2) + ); + """) + + val inserted = db.run( + ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( + ("face-mask", "Face Mask", 8.88), + ("guitar", "Guitar", 300), + ("socks", "Socks", 3.14), + ("skate-board", "Skate Board", 123.45), + ("camera", "Camera", 1000.00), + ("cookie", "Cookie", 0.10) + ) + ) + + assert(inserted == 6) + + val result = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result == Seq("Camera", "Guitar", "Skate Board")) + + db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) + + db.run(ExampleProduct.delete(_.name === "Guitar")) + + val result2 = + db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) + + assert(result2 == Seq("Camera", "Skate Board", "Cookie")) + + val result3 = + db.run( + ExampleProduct.select + .filter(_.price > 10) + .sortBy(_.price) + .desc + .map(p => (name = p.name, price = p.price)) + ) + + assert( + result3 == Seq( + (name = "Camera", price = 1000.00), + (name = "Skate Board", price = 123.45), + (name = "Cookie", price = 11.0) + ) + ) + } + } +} diff --git a/scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala b/scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala new file mode 
100644 index 00000000..e43ee2bc --- /dev/null +++ b/scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala @@ -0,0 +1,1636 @@ +package test.scalasql.namedtuples +import utest._ + +// adapted from scalasql/test/src/WorldSqlTests.scala to use named tuples +// base commit: 4556a0881848b4efdd4b5b9e536be8b33c44af09 +// changelog: +// - drop `[SC]`, `[T[_]]` and `T[Foo]` +// - use `SimpleTable` instead of `Table` +// - add `// +SNIPPET [FOO]` and `// -SNIPPET [FOO]` markers, +// used to include various snippets in the tutorial.md +// - add example of `Record` updates. +// +// Note: that the actual docs and examples might drift out of sync with `WorldSqlTests.scala` + +import java.sql.{JDBCType, PreparedStatement, ResultSet} + +object WorldSqlTestsNamedTuple extends TestSuite { + // +DOCS + // + // This tutorial is a tour of how to use ScalaSql, from the most basic concepts + // to writing some realistic queries. If you are browsing this on Github, you + // can open the `Outline` pane on the right to browse the section headers to + // see what we will cover and find anything specific of interest to you. + // + // ## Setup + // ### Importing Your Database Dialect + // To begin using ScalaSql, you need the following imports: + // +SNIPPET [IMPORTS] + import scalasql.simple.{*, given} + // -SNIPPET [IMPORTS] + import scalasql.H2Dialect._ + // This readme will use the H2 database for simplicity, but you can change the `Dialect` + // above to other databases as necessary. ScalaSql supports H2, Sqlite, HsqlExpr, + // Postgres, and MySql out of the box. 
The `Dialect` import provides the + // various operators and functions that may be unique to each specific database + // + // For these examples, we will be using the + // [MySql World Statistics Example Database](https://dev.mysql.com/doc/world-setup/en/), + // adjusted for compatibility with H2 + // + // ```sql + // +INCLUDE scalasql/test/resources/world-schema.sql + // ``` + // + // You can also check out the self-contained examples below if you want to use other + // supported databases, to see what kind of set up is necessary for each one + // + // * [Postgres](scalasql/test/src/example/PostgresExample.scala) + // * [MySql](scalasql/test/src/example/MySqlExample.scala) + // * [Sqlite](scalasql/test/src/example/SqliteExample.scala) + // * [H2](scalasql/test/src/example/H2Example.scala) + // * [HsqlExpr](scalasql/test/src/example/HsqlExprExample.scala) + // * [HikariCP](scalasql/test/src/example/HikariCpExample.scala) (and other connection pools) + // + // ### Modeling Your Schema + // + // Next, you need to define your data model classes. In ScalaSql, your data model + // is defined using `case class`es with each field representing a column in an database table. + // + // There are two flavors to consider: `Table` (available for Scala 2.13+), and `SimpleTable` (Scala 3.7+). + // + // **Using `Table`** + // + // Declare your case class with a type parameter `T[_]`, which is used to wrap the type of each + // field. This allows us to re-use the same case class to represent + // both database values (when `T` is `scalasql.Expr`) as well as Scala values + // (when `T` is `scalasql.Sc`). 
+ // + // Here, we define three classes `Country` `City` and `CountryLanguage`, modeling + // the database tables we saw above + // + // +SNIPPET [TABLES] + case class Country( + code: String, + name: String, + continent: String, + region: String, + surfaceArea: Int, + indepYear: Option[Int], + population: Long, + lifeExpectancy: Option[Double], + gnp: Option[scala.math.BigDecimal], + gnpOld: Option[scala.math.BigDecimal], + localName: String, + governmentForm: String, + headOfState: Option[String], + capital: Option[Int], + code2: String + ) + + object Country extends SimpleTable[Country] + + case class City( + id: Int, + name: String, + countryCode: String, + district: String, + population: Long + ) + + object City extends SimpleTable[City] + + case class CountryLanguage( + countryCode: String, + language: String, + isOfficial: Boolean, + percentage: Double + ) + + object CountryLanguage extends SimpleTable[CountryLanguage] + // -SNIPPET [TABLES] + // -DOCS + + // Shadow uTest's `ArrowAssert` to add lenient SQL string comparisons + implicit class ArrowAssert(lhs: Any) { + def ==>[V](rhs: V) = { + (lhs, rhs) match { + // Hack to make Arrays compare sanely; at some point we may want some + // custom, extensible, typesafe equality check but for now this will do + case (lhs: Array[_], rhs: Array[_]) => + Predef.assert( + lhs.toSeq == rhs.toSeq, + s"==> assertion failed: ${lhs.toSeq} != ${rhs.toSeq}" + ) + // Ignore trivial formatting differences when comparing SQL strings + case (lhs: String, rhs: String) => + Predef.assert( + lhs == rhs.trim.replaceAll("\\s+", " "), + s"==> assertion failed: $lhs != $rhs" + ) + case (lhs, rhs) => + Predef.assert(lhs == rhs, s"==> assertion failed: $lhs != $rhs") + } + } + } + + object NamedTupleDataTypes { + // +DOCS + // **Using `SimpleTable`** + // > Note: only available in the `com.lihaoyi::scalasql-namedtuples` library, which supports Scala 3.7.0+ + // + // Declare your case class as usual. 
Inside of queries, the class will be represented by a `Record` with the same fields, but wrapped in `scalasql.Expr`. + // + // Here, we define three classes `Country` `City` and `CountryLanguage`, modeling + // the database tables we saw above. + // + // Also included is the necessary import statement to include the `SimpleTable` definition. + // + // ```scala + // +INCLUDE scalasql/namedtuples/test/src/example/WorldSqlTestsSnippets.scala + // ``` + // -DOCS + } + + def tests = Tests { + // +DOCS + // ### Creating Your Database Client + // Lastly, we need to initialize our `scalasql.DbClient`. This requires + // passing in a `java.sql.Connection`, a `scalasql.Config` object, and the SQL dialect + // you are targeting (in this case `H2Dialect`). + val dbClient = new DbClient.Connection( + java.sql.DriverManager + .getConnection("jdbc:h2:mem:testdb" + scala.util.Random.nextInt(), "sa", ""), + new Config { + override def nameMapper(v: String) = v.toLowerCase() + } + ) + + val db = dbClient.getAutoCommitClientConnection + db.updateRaw(os.read(os.Path(sys.env("MILL_TEST_RESOURCE_DIR")) / "world-schema.sql")) + db.updateRaw(os.read(os.Path(sys.env("MILL_TEST_RESOURCE_DIR")) / "world-data.sql")) + // We use `dbClient.getAutoCommitClientConnection` in order to create a client that + // will automatically run every SQL command in a new transaction and commit it. For + // the majority of examples in this page, the exact transaction configuration doesn't + // matter, so using the auto-committing `db` API will help focus on the queries at hand. + // Note that when using a connection pool or `javax.sql.DataSource`, you will need to + // explicitly `.close()` the client returned by `getAutoCommitClientConnection` when you + // are done with it, to avoid leaking connections. 
Later in this tutorial we will + // see how to use `.transaction{}` blocks to create explicit transactions that can + // be rolled back or committed + + // + // Lastly, we will run the `world.sql` script to initialize the database, and + // we're ready to begin writing queries! + // -DOCS + + test("expr") { + // +DOCS + // ## Expressions + // The simplest thing you can query are `scalasql.Expr`s. These represent the SQL + // expressions that are part of a query, and can be evaluated even without being + // part of any database table. + // + // Here, we construct `Expr`s to represent the SQL query `1 + 3`. We can use + // `db.renderSql` to see the generated SQL code, and `db.run` to send the + // query to the database and return the output `4` + val query = Expr(1) + Expr(3) + db.renderSql(query) ==> "SELECT (? + ?) AS res" + db.run(query) ==> 4 + // In general, most primitive types that can be mapped to SQL can be converted + // to `scalasql.Expr`s: `Int`s and other numeric types, `String`s, `Boolean`s, + // etc., each returning an `Expr[T]` for the respective type `T`. Each type of + // `Expr[T]` has a set of operations representing what operations the database + // supports on that type of expression. + // + // You can check out the [ScalaSql Reference](reference.md#exprops) if you want a + // comprehensive list of built-in operations on various `Expr[T]` types. + // + // -DOCS + } + + test("city") { + // +DOCS + // ## Select + // + // The next type of query to look at are simple `SELECT`s. Each table + // that we modelled earlier has `.insert`, `.select`, `.update`, and `.delete` + // methods to help construct the respective queries. 
You can run a `Table.select` + // on its own in order to retrieve all the data in the table: + val query = City.select + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + """ + + db.run(query).take(3) ==> Seq( + City(1, "Kabul", "AFG", district = "Kabol", population = 1780000), + City(2, "Qandahar", "AFG", district = "Qandahar", population = 237500), + City(3, "Herat", "AFG", district = "Herat", population = 186800) + ) + // Notice that `db.run` returns instances of type `City`. + // + // `Sc` is `scalasql.Sc`, + // short for the "Scala" type, representing a `City` object containing normal Scala + // values. The `[Sc]` type parameter must be provided explicitly whenever creating, + // type-annotating, or otherwise working with these `City` values. + // + // In this example, we do `.take(3)` after running the query to show only the first + // 3 table entries for brevity, but by that point the `City.select` query had already + // fetched the entire database table into memory. This can be a problem with non-trivial + // datasets, and for that you can filter the query before running it, as shown below. + // -DOCS + } + + test("filter") { + + test("singleName") { + + // +DOCS + // ### Filtering + // + // To avoid loading the entire database table into your Scala program, you can + // add filters to the query before running it. Below, we add a filter to only + // query the city whose name is "Singapore" + val query = City.select.filter(_.name === "Singapore").single + + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + WHERE (city0.name = ?) 
+ """ + + db.run(query) ==> City(3208, "Singapore", "SGP", district = "", population = 4017733) + // Note that we use `===` rather than `==` for the equality comparison. The + // function literal passed to `.filter` is given a `City[Expr]` as its parameter, + // representing a `City` that is part of the database query, in contrast to the + // `City`s that `db.run` returns, and so `_.name` is of type `Expr[String]` + // rather than just `String` or `Sc[String]`. You can use your IDE's + // auto-complete to see what operations are available on `Expr[String]`: typically + // they will represent SQL string functions rather than Scala string functions and + // take and return `Expr[String]`s rather than plain Scala `String`s. Database + // value equality is represented by the `===` operator. + // + // Note also the `.single` operator. This tells ScalaSql that you expect exactly + // one result row from this query: not zero rows, and not more than one row. This + // causes `db.run` to return a `City` rather than `Seq[City]`, and throw + // an exception if zero or multiple rows are returned by the query. + // + // -DOCS + } + test("head") { + + // +DOCS + // You can also use `.head` rather than `.single`, for cases where you + // want a single result row and want additional result rows to be ignored + // rather than causing an exception. `.head` is short for `.take(1).single` + val query = City.select.filter(_.name === "Singapore").head + + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + WHERE (city0.name = ?) + LIMIT ? 
+ """ + + db.run(query) ==> City(3208, "Singapore", "SGP", district = "", population = 4017733) + // -DOCS + } + + test("singleId") { + + // +DOCS + // Apart from filtering by name, it is also very common to filter by ID, + // as shown below: + val query = City.select.filter(_.id === 3208).single + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + WHERE (city0.id = ?) + """ + + db.run(query) ==> City(3208, "Singapore", "SGP", district = "", population = 4017733) + // -DOCS + } + + test("multiple") { + test("combined") { + // +DOCS + // You can filter on multiple things, e.g. here we look for cities in China + // with population more than 5 million: + val query = City.select.filter(c => c.population > 5000000 && c.countryCode === "CHN") + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + WHERE ((city0.population > ?) AND (city0.countrycode = ?)) + """ + + db.run(query).take(2) ==> Seq( + City(1890, "Shanghai", "CHN", district = "Shanghai", population = 9696300), + City(1891, "Peking", "CHN", district = "Peking", population = 7472000) + ) + // Again, all the operations within the query work on `Expr`s: `c` is a `City[Expr]`, + // `c.population` is an `Expr[Int]`, `c.countryCode` is an `Expr[String]`, and + // `===` and `>` and `&&` on `Expr`s all return `Expr[Boolean]`s that represent + // a SQL expression that can be sent to the Database as part of your query. 
+ // -DOCS + } + + test("separate") { + // +DOCS + // You can also stack multiple separate filters together, as shown below: + val query = City.select.filter(_.population > 5000000).filter(_.countryCode === "CHN") + db.renderSql(query) ==> """ + SELECT + city0.id AS id, + city0.name AS name, + city0.countrycode AS countrycode, + city0.district AS district, + city0.population AS population + FROM city city0 + WHERE (city0.population > ?) AND (city0.countrycode = ?) + """ + + db.run(query).take(2) ==> Seq( + City(1890, "Shanghai", "CHN", district = "Shanghai", population = 9696300), + City(1891, "Peking", "CHN", district = "Peking", population = 7472000) + ) + // -DOCS + } + } + } + + test("lifting") { + test("implicit") { + // +DOCS + // ### Lifting + // Conversion of simple primitive `T`s into `Expr[T]`s happens implicitly. Below, + // `===` expects both left-hand and right-hand values to be `Expr`s. `_.id` is + // already an `Expr[Int]`, but `cityId` is a normal `Int` that is "lifted" into + // a `Expr[Int]` automatically + def find(cityId: Int) = db.run(City.select.filter(_.id === cityId)) + + assert(find(3208) == List(City(3208, "Singapore", "SGP", "", 4017733))) + assert(find(3209) == List(City(3209, "Bratislava", "SVK", "Bratislava", 448292))) + // Lifting of Scala values into your ScalaSql queries is dependent on there being + // an implicit `scalasql.TypeMapper[T]` in scope. 
+ // + // but you can define `TypeMapper`s + // for your own types if you want to be able to use them to represent types in the database + // -DOCS + } + + test("explicit") { + // +DOCS + // This implicit lifting can be done explicitly using the `Expr(...)` syntax + // as shown below + def find(cityId: Int) = db.run(City.select.filter(_.id === Expr(cityId))) + + assert(find(3208) == List(City(3208, "Singapore", "SGP", "", 4017733))) + assert(find(3209) == List(City(3209, "Bratislava", "SVK", "Bratislava", 448292))) + // -DOCS + } + + test("values") { + // +DOCS + // You can also interpolate `Seq[T]`s for any `T: TypeMapper` into your + // query using `Values(...)`, which translates into a SQL `VALUES` clause + val query = City.select + .filter(c => db.values(Seq("Singapore", "Kuala Lumpur", "Jakarta")).contains(c.name)) + .map(_.countryCode) + + db.renderSql(query) ==> """ + SELECT city0.countrycode AS res + FROM city city0 + WHERE (city0.name IN (VALUES (?), (?), (?))) + """ + + db.run(query) ==> Seq("IDN", "MYS", "SGP") + // -DOCS + } + } + + test("mapping") { + test("tuple2") { + // +DOCS + // ### Mapping + // + // You can use `.map` to select exactly what values you want to return from a query. 
+ // Below, we query the `country` table, but only want the `name` and `continent` of + // each country, without all the other metadata: + // +SNIPPET [MAP-1] + // `NamedTupleQueryable` is also included by `import scalasql.simple.given` + import scalasql.namedtuples.NamedTupleQueryable.given + + val query = Country.select.map(c => (name = c.name, continent = c.continent)) + // -SNIPPET [MAP-1] + db.renderSql(query) ==> """ + SELECT country0.name AS res_0, country0.continent AS res_1 + FROM country country0 + """ + + // +SNIPPET [MAP-2] + db.run(query).take(5) ==> Seq( + (name = "Afghanistan", continent = "Asia"), + (name = "Netherlands", continent = "Europe"), + (name = "Netherlands Antilles", continent = "North America"), + (name = "Albania", continent = "Europe"), + (name = "Algeria", continent = "Africa") + ) + // -SNIPPET [MAP-2] + // -DOCS + } + test("record_updates") { + // +SNIPPET [MAP-3] + val query = Country.select.map(c => + c.updates( + _.population := 0L, + _.name(old => Expr("🌐 ") + old) + ) + ) + // -SNIPPET [MAP-3] + db.renderSql(query) ==> """ + SELECT + country0.code AS code, + (? || country0.name) AS name, + country0.continent AS continent, + country0.region AS region, + country0.surfacearea AS surfacearea, + country0.indepyear AS indepyear, + ? 
AS population, + country0.lifeexpectancy AS lifeexpectancy, + country0.gnp AS gnp, + country0.gnpold AS gnpold, + country0.localname AS localname, + country0.governmentform AS governmentform, + country0.headofstate AS headofstate, + country0.capital AS capital, + country0.code2 AS code2 + FROM country country0 + """ + // +SNIPPET [MAP-4] + db.run(query).take(5).match { + case Seq( + Country(name = "🌐 Afghanistan", population = 0L), + Country(name = "🌐 Netherlands", population = 0L), + Country(name = "🌐 Netherlands Antilles", population = 0L), + Country(name = "🌐 Albania", population = 0L), + Country(name = "🌐 Algeria", population = 0L) + ) => + } ==> () + // -SNIPPET [MAP-4] + } + + test("heterogenousTuple") { + // +DOCS + // These `.map` calls can contain arbitrarily complex data: below, we query + // the `city` table to look for `Singapore` and get the entire row as a `City`, + // but also want to fetch the uppercase name and the population-in-millions. As + // you would expect, you get a tuple of `(City, String, Int)` back. + val query = City.select + .filter(_.name === "Singapore") + .map(c => (c, c.name.toUpperCase, c.population / 1000000)) + .single + + db.renderSql(query) ==> """ + SELECT + city0.id AS res_0_id, + city0.name AS res_0_name, + city0.countrycode AS res_0_countrycode, + city0.district AS res_0_district, + city0.population AS res_0_population, + UPPER(city0.name) AS res_1, + (city0.population / ?) AS res_2 + FROM city city0 + WHERE (city0.name = ?) 
+ """ + + db.run(query) ==> + ( + City(3208, "Singapore", "SGP", district = "", population = 4017733), + "SINGAPORE", + 4 // population in millions + ) + // -DOCS + } + } + + test("aggregate") { + test("sum") { + // +DOCS + // ### Aggregates + // + // You can perform simple aggregates like `.sum` as below, where we + // query all cities in China and sum up their populations + val query = City.select.filter(_.countryCode === "CHN").map(_.population).sum + db.renderSql(query) ==> + "SELECT SUM(city0.population) AS res FROM city city0 WHERE (city0.countrycode = ?)" + + db.run(query) ==> 175953614 + // -DOCS + } + test("sumBy") { + // +DOCS + // Many aggregates have a `By` version, e.g. `.sumBy`, which allows you to + // customize exactly what you are aggregating: + val query = City.select.sumBy(_.population) + db.renderSql(query) ==> "SELECT SUM(city0.population) AS res FROM city city0" + + db.run(query) ==> 1429559884 + // -DOCS + } + test("size") { + // +DOCS + // `.size` is a commonly used function that translates to the SQL aggregate + // `COUNT(1)`. Below, we count how many countries in our database have population + // greater than one million + val query = Country.select.filter(_.population > 1000000).size + db.renderSql(query) ==> + "SELECT COUNT(1) AS res FROM country country0 WHERE (country0.population > ?)" + + db.run(query) ==> 154 + // -DOCS + } + test("aggregate") { + // +DOCS + // If you want to perform multiple aggregates at once, you can use the `.aggregate` + // function. 
Below, we run a single query that returns the minimum, average, and + // maximum populations across all countries in our dataset + val query = Country.select + .aggregate(cs => (cs.minBy(_.population), cs.avgBy(_.population), cs.maxBy(_.population))) + db.renderSql(query) ==> """ + SELECT + MIN(country0.population) AS res_0, + AVG(country0.population) AS res_1, + MAX(country0.population) AS res_2 + FROM country country0 + """ + + db.run(query) ==> (0, 25434098, 1277558000) + // -DOCS + } + } + + test("sortDropTake") { + // +DOCS + // ### Sort/Drop/Take + // + // You can use `.sortBy` to order the returned rows, and `.drop` and `.take` + // to select a range of rows within the entire result set: + val query = City.select + .sortBy(_.population) + .desc + .drop(5) + .take(5) + .map(c => (c.name, c.population)) + + db.renderSql(query) ==> """ + SELECT city0.name AS res_0, city0.population AS res_1 + FROM city city0 + ORDER BY res_1 DESC + LIMIT ? OFFSET ? + """ + + db.run(query) ==> Seq( + ("Karachi", 9269265), + ("Istanbul", 8787958), + ("Ciudad de México", 8591309), + ("Moscow", 8389200), + ("New York", 8008278) + ) + // You can also use `.drop` and `.take` without `.sortBy`, but in that case + // the order of returned rows is arbitrary and may differ between databases + // and implementations + // + // -DOCS + } + test("casting") { + // +DOCS + // ### Casting + // + // You can use `.cast` to generate SQL `CAST` calls between data types. Below, + // we use it to query Singapore's life expectancy and convert it from a `Double` + // precision floating point number to an `Int`: + val query = Country.select + .filter(_.name === "Singapore") + .map(_.lifeExpectancy.cast[Int]) + .single + + db.renderSql(query) ==> """ + SELECT CAST(country0.lifeexpectancy AS INTEGER) AS res + FROM country country0 + WHERE (country0.name = ?) 
+ """ + + db.run(query) ==> 80 + // You can `.cast` to any type with a `TypeMapper[T]` defined, which is the + // same set of types you can lift into queries. + // + // + // -DOCS + } + + test("nullable") { + test("operations") { + // +DOCS + // ### Nullable Columns + // + // Nullable SQL columns are modeled via `T[Option[V]]` fields in your `case class`, + // meaning `Expr[Option[V]]` in your query and meaning `Sc[Option[V]]` (or just + // meaning `Option[V]`) in the returned data. `Expr[Option[V]]` supports a similar + // set of operations as `Option[V]`: `isDefined`, `isEmpty`, `map`, `flatMap`, `get`, + // `orElse`, etc., but returning `Expr[V]`s rather than plain `V`s. + val query = Country.select + .filter(_.capital.isEmpty) + .size + + db.renderSql(query) ==> """ + SELECT COUNT(1) AS res + FROM country country0 + WHERE (country0.capital IS NULL) + """ + + db.run(query) ==> 7 + // -DOCS + } + test("equality") { + // +DOCS + // ScalaSQL supports two different kinds of equality: + // + // ```scala + // // Scala equality + // a === b + // a !== b + // // SQL equality + // a `=` b + // a <> b + // ``` + // + // Most of the time these two things are the same, except when `a` or `b` + // are nullable. In that case: + // + // * SQL equality follows SQL rules that `NULL = anything` + // is always `false`, and `NULL <> anything` is also always false. + // + // * Scala equality follows Scala rules that `None === None` is `true` + // + // The difference between these two operations can be seen below, where + // using SQL equality to compare the `capital` column against a `None` + // value translates directly into a SQL `=` which always returns false + // because the right hand value is `None`/`NULL`, thus returning zero + // countries: + + val myOptionalCityId: Option[Int] = None + val query = Country.select + .filter(_.capital `=` myOptionalCityId) + .size + + db.renderSql(query) ==> """ + SELECT COUNT(1) AS res + FROM country country0 + WHERE (country0.capital = ?) 
+ """ + + db.run(query) ==> 0 + // Whereas using Scala equality with `===` translates into a more + // verbose `IS NOT DISTINCT FROM` + // expression, returning `true` when both left-hand and right-hand values + // are `None`/`NULL`, thus successfully returning all countries for which + // the `capital` column is `NULL` + val query2 = Country.select + .filter(_.capital === myOptionalCityId) + .size + + db.renderSql(query2) ==> """ + SELECT COUNT(1) AS res + FROM country country0 + WHERE (country0.capital IS NOT DISTINCT FROM ?) + """ + + db.run(query2) ==> 7 + // -DOCS + } + } + test("joins") { + test("inner") { + // +DOCS + // ### Joins + // + // You can perform SQL inner `JOIN`s between tables via the `.join` + // method. Below, we use a `JOIN` to look for cities which are in the country + // named "Liechtenstein": + val query = City.select + .join(Country)(_.countryCode === _.code) + .filter { case (city, country) => country.name === "Liechtenstein" } + .map { case (city, country) => city.name } + + db.renderSql(query) ==> """ + SELECT city0.name AS res + FROM city city0 + JOIN country country1 ON (city0.countrycode = country1.code) + WHERE (country1.name = ?) + """ + + db.run(query) ==> Seq("Schaan", "Vaduz") + // -DOCS + } + test("right") { + // +DOCS + // `LEFT JOIN`, `RIGHT JOIN`, and `OUTER JOIN`s are also supported, e.g.
+ val query = City.select + .rightJoin(Country)(_.countryCode === _.code) + .filter { case (cityOpt, country) => cityOpt.isEmpty(_.id) } + .map { case (cityOpt, country) => (cityOpt.map(_.name), country.name) } + + db.renderSql(query) ==> """ + SELECT city0.name AS res_0, country1.name AS res_1 + FROM city city0 + RIGHT JOIN country country1 ON (city0.countrycode = country1.code) + WHERE (city0.id IS NULL) + """ + + db.run(query) ==> Seq( + (None, "Antarctica"), + (None, "Bouvet Island"), + (None, "British Indian Ocean Territory"), + (None, "South Georgia and the South Sandwich Islands"), + (None, "Heard Island and McDonald Islands"), + (None, "French Southern territories"), + (None, "United States Minor Outlying Islands") + ) + // Note that when you use a left/right/outer join, the corresponding + // rows are provided to you as `scalasql.JoinNullable[T]` rather than plain `T`s, e.g. + // `cityOpt: scalasql.JoinNullable[City[Expr]]` above. `JoinNullable[T]` can be checked + // for presence/absence using `.isEmpty` and specifying a specific column to check, + // and can be converted to an `Expr[Option[T]]` by `.map`ing it to a particular + // `Expr[T]`. + // + // -DOCS + } + + test("flatMap") { + // +DOCS + // ScalaSql also supports performing `JOIN`s via Scala's `for`-comprehension syntax and `.join`. + // `for`-comprehensions also support `.crossJoin()` for joins without an `ON` clause, and + // `.leftJoin()` returning `JoinNullable[T]` for joins where the joined table may not have corresponding + // rows. + val query = for { + city <- City.select + country <- Country.join(city.countryCode === _.code) + if country.name === "Liechtenstein" + } yield city.name + + db.renderSql(query) ==> """ + SELECT city0.name AS res + FROM city city0 + JOIN country country1 ON (city0.countrycode = country1.code) + WHERE (country1.name = ?)
+ """ + + db.run(query) ==> Seq("Schaan", "Vaduz") + // -DOCS + } + } + + test("subquery") { + test("join") { + // +DOCS + // ## Subqueries + // + // ScalaSql in general allows you to use SQL Subqueries anywhere you would use + // a table. e.g. you can pass a Subquery to `.join`, as we do in the below + // query to find language and the name of the top 2 most populous countries: + val query = CountryLanguage.select + .join(Country.select.sortBy(_.population).desc.take(2))(_.countryCode === _.code) + .map { case (language, country) => (language.language, country.name) } + .sortBy(_._1) + + db.renderSql(query) ==> """ + SELECT countrylanguage0.language AS res_0, subquery1.name AS res_1 + FROM countrylanguage countrylanguage0 + JOIN (SELECT + country1.code AS code, + country1.name AS name, + country1.population AS population + FROM country country1 + ORDER BY population DESC + LIMIT ?) subquery1 + ON (countrylanguage0.countrycode = subquery1.code) + ORDER BY res_0 + """ + + db.run(query).take(5) ==> Seq( + ("Asami", "India"), + ("Bengali", "India"), + ("Chinese", "China"), + ("Dong", "China"), + ("Gujarati", "India") + ) + // -DOCS + } + test("from") { + // +DOCS + // Some operations automatically generate subqueries where necessary, e.g. + // performing a `.join` after you have done a `.take`: + val query = Country.select + .sortBy(_.population) + .desc + .take(2) + .join(CountryLanguage)(_.code === _.countryCode) + .map { case (country, language) => + (language.language, country.name) + } + .sortBy(_._1) + + db.renderSql(query) ==> """ + SELECT countrylanguage1.language AS res_0, subquery0.name AS res_1 + FROM (SELECT + country0.code AS code, + country0.name AS name, + country0.population AS population + FROM country country0 + ORDER BY population DESC + LIMIT ?) 
subquery0 + JOIN countrylanguage countrylanguage1 + ON (subquery0.code = countrylanguage1.countrycode) + ORDER BY res_0 + """ + + db.run(query).take(5) ==> List( + ("Asami", "India"), + ("Bengali", "India"), + ("Chinese", "China"), + ("Dong", "China"), + ("Gujarati", "India") + ) + // -DOCS + } + test("force") { + // +DOCS + // You can force a subquery using `.subquery`, in cases where it would normally + // be combined into a single query. This can be useful in cases where the + // database query plan changes based on whether a subquery is present or not + val query = Country.select.sortBy(_.population).desc.subquery.take(2).map(_.name) + + db.renderSql(query) ==> """ + SELECT subquery0.name AS res + FROM (SELECT country0.name AS name, country0.population AS population + FROM country country0 + ORDER BY population DESC) subquery0 + LIMIT ? + """ + + db.run(query) ==> List("China", "India") + // -DOCS + } + } + + test("union") { + // +DOCS + // ## Union/Except/Intersect + // + // ScalaSql supports `.union`/`.unionAll`/`.except`/`.intersect` operations, + // generating SQL `UNION`/`UNION ALL`/`EXCEPT`/`INTERSECT` clauses. These + // also generate subqueries as necessary + val largestCountries = + Country.select.sortBy(_.name).sortBy(_.population).desc.take(2).map(_.name) + + val smallestCountries = + Country.select.sortBy(_.name).sortBy(_.population).asc.take(2).map(_.name) + + val query = smallestCountries.union(largestCountries) + + db.renderSql(query) ==> """ + SELECT subquery0.res AS res + FROM (SELECT country0.name AS res + FROM country country0 + ORDER BY country0.population ASC, res + LIMIT ?) subquery0 + UNION + SELECT subquery0.res AS res + FROM (SELECT country0.name AS res + FROM country country0 + ORDER BY country0.population DESC, res + LIMIT ?) 
subquery0 + """ + + db.run(query) ==> List("Antarctica", "Bouvet Island", "China", "India") + // -DOCS + } + test("window") { + test("simple") { + // +DOCS + // ## Window Functions + // ScalaSql supports window functions via the `.over` operator, which + // enables the `.partitionBy` and `.sortBy` operators on `Expr[T]`. These + // translate into SQL's `OVER`/`PARTITION BY`/`ORDER BY` clauses + val query = City.select + .map(c => + ( + c.name, + c.countryCode, + c.population, + db.rank().over.partitionBy(c.countryCode).sortBy(c.population).desc + ) + ) + .filter { case (name, countryCode, population, rank) => + db.values(Seq("San Francisco", "New York", "Kuala Lumpur", "Pinang", "Johor Baharu")) + .contains(name) + } + + db.renderSql(query) ==> """ + SELECT + city0.name AS res_0, + city0.countrycode AS res_1, + city0.population AS res_2, + RANK() OVER (PARTITION BY city0.countrycode ORDER BY city0.population DESC) AS res_3 + FROM city city0 + WHERE (city0.name IN (VALUES (?), (?), (?), (?), (?))) + """ + + db.run(query) ==> Seq( + ("Kuala Lumpur", "MYS", 1297526L, 1), + ("Johor Baharu", "MYS", 328436L, 2), + ("Pinang", "MYS", 219603L, 3), + ("New York", "USA", 8008278L, 1), + ("San Francisco", "USA", 776733L, 2) + ) + // -DOCS + } + test("aggregate") { + // +DOCS + // You can also perform aggregates as part of your window function by using + // the `.mapAggregate` function; this provides a `Aggregatable.Proxy[Q]` rather than + // a `Q`, letting you perform aggregates like `.sumBy` that you can then use + // as window functions via `.over`. You can reference normal columns by referencing + // the `.expr` member on each `Aggregatable.Proxy`. 
+ val query = City.select + .mapAggregate((c, cs) => + ( + c.name, + c.countryCode, + c.population, + cs.sumBy(_.population).over.partitionBy(c.countryCode).sortBy(c.population).desc + ) + ) + .filter { case (name, countryCode, population, rank) => + db.values(Seq("Singapore", "Kuala Lumpur", "Pinang", "Johor Baharu")).contains(name) + } + + db.renderSql(query) ==> """ + SELECT + city0.name AS res_0, + city0.countrycode AS res_1, + city0.population AS res_2, + SUM(city0.population) OVER (PARTITION BY city0.countrycode ORDER BY city0.population DESC) AS res_3 + FROM city city0 + WHERE (city0.name IN (VALUES (?), (?), (?), (?))) + """ + + db.run(query).sortBy(t => (t._2, t._4)) ==> Seq( + ("Kuala Lumpur", "MYS", 1297526L, 1297526L), + ("Johor Baharu", "MYS", 328436L, 1625962L), + ("Pinang", "MYS", 219603L, 1845565L), + ("Singapore", "SGP", 4017733L, 4017733L) + ) + // -DOCS + } + } + + test("realistic") { + test("languagesByCities") { + // +DOCS + // + // ## Realistic Queries + // + // ### Languages Spoken In Most Cities + // Here's a more complicated query using the techniques we've learned so far: + // a query fetching the top 10 languages spoken by the largest number of cities + val query = City.select + .join(CountryLanguage)(_.countryCode === _.countryCode) + .map { case (city, language) => (city.id, language.language) } + .groupBy { case (city, language) => language }(_.size) + .sortBy { case (language, cityCount) => cityCount } + .desc + .take(10) + + db.renderSql(query) ==> """ + SELECT countrylanguage1.language AS res_0, COUNT(1) AS res_1 + FROM city city0 + JOIN countrylanguage countrylanguage1 ON (city0.countrycode = countrylanguage1.countrycode) + GROUP BY countrylanguage1.language + ORDER BY res_1 DESC + LIMIT ? 
+ """ + + db.run(query) ==> Seq( + ("Chinese", 1083), + ("German", 885), + ("Spanish", 881), + ("Italian", 857), + ("English", 823), + ("Japanese", 774), + ("Portuguese", 629), + ("Korean", 608), + ("Polish", 557), + ("French", 467) + ) + // -DOCS + } + + test("weightedLifeExpectancyByContinent") { + // +DOCS + // ### Population-Weighted Average Life Expectancy Per Continent + // Another non-trivial query: listing the population-weighted + // average life expectancy per continent + val query = Country.select + .groupBy(_.continent)(group => + group.sumBy(c => c.lifeExpectancy.get * c.population) / group.sumBy(_.population) + ) + .sortBy(_._2) + .desc + + db.renderSql(query) ==> """ + SELECT + country0.continent AS res_0, + (SUM((country0.lifeexpectancy * country0.population)) / SUM(country0.population)) AS res_1 + FROM country country0 + GROUP BY country0.continent + ORDER BY res_1 DESC + """ + + db.run(query) ==> Seq( + ("Oceania", 75.90188415576932), + ("North America", 74.91544123695004), + ("Europe", 73.82361172661305), + ("South America", 67.54433544271905), + ("Asia", 67.35222776275229), + ("Africa", 52.031677405178264), + ("Antarctica", 0.0) + ) + // -DOCS + } + + test("largestCityInThreeLargestCountries") { + // +DOCS + // ### Most Populous City in each of the Three Most Populous Countries + // This example first gets the three largest Countries, `JOIN`s the + // Cities, uses a filter with a subquery to pick the city with the largest + // population in each country, and then returns the name and population of + // each city/country pair.
The "top 3 countries" part of the query before + // the `JOIN` is automatically converted to a subquery to be compliant + // with SQL syntax + + val query = Country.select + .sortBy(_.population) + .desc + .take(3) + .join(City)(_.code === _.countryCode) + .filter { case (country, city) => + city.id === + City.select + .filter(_.countryCode === country.code) + .sortBy(_.population) + .desc + .map(_.id) + .take(1) + .toExpr + } + .map { case (country, city) => + (country.name, country.population, city.name, city.population) + } + + db.renderSql(query) ==> """ + SELECT + subquery0.name AS res_0, + subquery0.population AS res_1, + city1.name AS res_2, + city1.population AS res_3 + FROM (SELECT + country0.code AS code, + country0.name AS name, + country0.population AS population + FROM country country0 + ORDER BY population DESC + LIMIT ?) subquery0 + JOIN city city1 ON (subquery0.code = city1.countrycode) + WHERE (city1.id = (SELECT + city2.id AS res + FROM city city2 + WHERE (city2.countrycode = subquery0.code) + ORDER BY city2.population DESC + LIMIT ?)) + """ + + db.run(query) ==> Seq( + ("China", 1277558000, "Shanghai", 9696300), + ("India", 1013662000, "Mumbai (Bombay)", 10500000), + ("United States", 278357000, "New York", 8008278) + ) + // -DOCS + } + + test("largestThreeCitiesByCountry") { + // +DOCS + // ### Most Populous Three Cities In Each Country + // This example queries the top 3 cities with the largest population in + // each country, using ScalaSql's `rank()` function that translates into + // SQL's `RANK()`. 
Note that `RANK()` does not work inside the SQL `WHERE` + // clause, and so we need to use `.subquery` to ensure that the `RANK()` is + // run in an isolated subquery and does not get executed in the WHERE clause + val query = City.select + .map(c => (c, db.rank().over.partitionBy(c.countryCode).sortBy(c.population).desc)) + .subquery + .filter { case (city, r) => r <= 3 } + .map { case (city, r) => (city.name, city.population, city.countryCode, r) } + .join(Country)(_._3 === _.code) + .sortBy(_._5.population) + .desc + .map { case (name, population, countryCode, r, country) => + (name, population, countryCode, r) + } + + db.renderSql(query) ==> """ + SELECT + subquery0.res_0_name AS res_0, + subquery0.res_0_population AS res_1, + subquery0.res_0_countrycode AS res_2, + subquery0.res_1 AS res_3 + FROM (SELECT + city0.name AS res_0_name, + city0.countrycode AS res_0_countrycode, + city0.population AS res_0_population, + RANK() OVER (PARTITION BY city0.countrycode ORDER BY city0.population DESC) AS res_1 + FROM city city0) subquery0 + JOIN country country1 ON (subquery0.res_0_countrycode = country1.code) + WHERE (subquery0.res_1 <= ?) + ORDER BY country1.population DESC + """ + + db.run(query).take(10) ==> Seq( + ("Shanghai", 9696300L, "CHN", 1), + ("Peking", 7472000L, "CHN", 2), + ("Chongqing", 6351600L, "CHN", 3), + ("Mumbai (Bombay)", 10500000L, "IND", 1), + ("Delhi", 7206704L, "IND", 2), + ("Calcutta [Kolkata]", 4399819L, "IND", 3), + ("New York", 8008278L, "USA", 1), + ("Los Angeles", 3694820L, "USA", 2), + ("Chicago", 2896016L, "USA", 3), + ("Jakarta", 9604900L, "IDN", 1) + ) + // -DOCS + } + } + + test("insert") { + test("values") { + // +DOCS + // ## Inserts + // ScalaSql supports SQL `INSERT`s with multiple styles. 
You can insert + // a single row via `.insert.columns`, passing the columns you want to insert + // (and leaving out any that the database would auto-populate) + val query = City.insert.columns( // ID provided by database AUTO_INCREMENT + _.name := "Sentosa", + _.countryCode := "SGP", + _.district := "South", + _.population := 1337 + ) + db.renderSql(query) ==> + "INSERT INTO city (name, countrycode, district, population) VALUES (?, ?, ?, ?)" + + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq( + City(3208, "Singapore", "SGP", district = "", population = 4017733), + City(4080, "Sentosa", "SGP", district = "South", population = 1337) + ) + // -DOCS + } + + test("batched") { + // +DOCS + // You can perform batch inserts via `.insert.batched`, passing in both a set of + // columns and a list of tuples that provide the data inserted into those columns: + val query = City.insert.batched(_.name, _.countryCode, _.district, _.population)( + ("Sentosa", "SGP", "South", 1337), // ID provided by database AUTO_INCREMENT + ("Loyang", "SGP", "East", 31337), + ("Jurong", "SGP", "West", 313373) + ) + db.renderSql(query) ==> """ + INSERT INTO city (name, countrycode, district, population) VALUES + (?, ?, ?, ?), + (?, ?, ?, ?), + (?, ?, ?, ?)
+ """ + + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq( + City(3208, "Singapore", "SGP", district = "", population = 4017733), + City(4080, "Sentosa", "SGP", district = "South", population = 1337), + City(4081, "Loyang", "SGP", district = "East", population = 31337), + City(4082, "Jurong", "SGP", district = "West", population = 313373) + ) + // -DOCS + } + + test("select") { + // +DOCS + // Or you can provide an entire `SELECT` query via `.insert.select`, allowing + // you to select arbitrary data from the same table (or another table) to insert: + val query = City.insert.select( + c => (c.name, c.countryCode, c.district, c.population), + City.select + .filter(_.name === "Singapore") + .map(c => (Expr("New-") + c.name, c.countryCode, c.district, Expr(0L))) + ) + + db.renderSql(query) ==> """ + INSERT INTO city (name, countrycode, district, population) + SELECT (? || city0.name) AS res_0, city0.countrycode AS res_1, city0.district AS res_2, ? AS res_3 + FROM city city0 + WHERE (city0.name = ?) + """ + + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq( + City(3208, "Singapore", "SGP", district = "", population = 4017733), + City(4080, "New-Singapore", "SGP", district = "", population = 0) + ) + // These three styles of inserts that ScalaSql provides correspond directly to the + // various `INSERT` syntaxes supported by the underlying database. + // + // -DOCS + } + } + + test("update") { + test("simple") { + // +DOCS + // ## Updates + // + // ScalaSql allows updates via the `.update` syntax, that takes a filter + // and a list of columns to update: + val query = City + .update(_.countryCode === "SGP") + .set(_.population := 0, _.district := "UNKNOWN") + + db.renderSql(query) ==> + "UPDATE city SET population = ?, district = ? 
WHERE (city.countrycode = ?)" + + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "UNKNOWN", population = 0) + // -DOCS + } + + test("computed") { + // +DOCS + // You can perform computed updates by referencing columns as part of the + // expressions passed to the `.set` call: + val query = City + .update(_.countryCode === "SGP") + .set(c => c.population := (c.population + 1000000)) + db.renderSql(query) ==> + "UPDATE city SET population = (city.population + ?) WHERE (city.countrycode = ?)" + + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "", population = 5017733) + // -DOCS + } + test("all") { + // +DOCS + // The filter predicate to `.update` is mandatory, to avoid performing updates across + // an entire database table accidentally . If you really want to perform an update + // on every single row, you can pass in `_ => true` as your filter: + val query = City.update(_ => true).set(_.population := 0) + db.renderSql(query) ==> "UPDATE city SET population = ?" + + db.run(query) + + db.run(City.select.filter(_.countryCode === "LIE")) ==> Seq( + City(2445, "Schaan", "LIE", district = "Schaan", population = 0), + City(2446, "Vaduz", "LIE", district = "Vaduz", population = 0) + ) + // -DOCS + } + } + + test("delete") { + // +DOCS + // ## Deletes + // Deletes are performed by the `.delete` method, which takes a predicate + // letting you specify what rows you want to delete. + val query = City.delete(_.countryCode === "SGP") + db.renderSql(query) ==> "DELETE FROM city WHERE (city.countrycode = ?)" + db.run(query) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() + // -DOCS + } + + test("transactions") { + test("exception") { + // +DOCS + // ## Transactions + // You can use `.transaction` to perform an explicit database transaction. 
+ // This transaction is opened when `.transaction` begins, is committed when + // `.transaction` terminates successfully, and is rolled back if + // `.transaction` terminates with an error. + // + // Below, we can see how `.delete` immediately takes effect within the + // transaction, but when it fails due to an exception the deletion is rolled + // back and a subsequent transaction can see the deleted city re-appear + try { + dbClient.transaction { implicit db => + db.run(City.delete(_.countryCode === "SGP")) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() + + throw new Exception() + } + } catch { + case e: Exception => /*do nothing*/ + } + + dbClient.transaction { implicit db => + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "", population = 4017733) + } + // -DOCS + } + test("rollback") { + // +DOCS + // You can also roll back a transaction explicitly via the `.rollback()` method: + dbClient.transaction { implicit db => + db.run(City.delete(_.countryCode === "SGP")) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() + + db.rollback() + } + + dbClient.transaction { implicit db => + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "", population = 4017733) + } + // -DOCS + } + } + test("savepoint") { + test("exception") { + // +DOCS + // ### Savepoints + // Most databases support Savepoints, which are sort of "nested transactions" + // allowing you to roll back portions of a transaction without rolling back + // everything.
+ // + // ScalaSql supports these via the `.savepoint` method, which works similarly + // to `.transaction`: if the provided block terminates successfully the savepoint + // is committed ("released"), if it terminates with an exception the savepoint + // is rolled back and all changes are undone (as seen below) + dbClient.transaction { implicit db => + try { + db.savepoint { _ => + db.run(City.delete(_.countryCode === "SGP")) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() + throw new Exception() + } + } catch { + case e: Exception => /*do nothing*/ + } + + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "", population = 4017733) + + } + // -DOCS + } + test("rollback") { + // +DOCS + // Savepoints support an explicit `.rollback()` method, just as transactions do: + dbClient.transaction { implicit db => + db.savepoint { implicit sp => + db.run(City.delete(_.countryCode === "SGP")) + + db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() + + sp.rollback() + } + + db.run(City.select.filter(_.countryCode === "SGP").single) ==> + City(3208, "Singapore", "SGP", district = "", population = 4017733) + } + // -DOCS + } + } + + test("customExpressions") { + // +DOCS + // ## Custom Expressions + // + // You can define custom SQL expressions via the `Expr` constructor. This is + // useful for extending ScalaSql when you need to use some operator or syntax + // that your Database supports but ScalaSql does not have built in. This example + // shows how to define a custom `rawToHex` Scala function working on `Expr[T]`s, + // that translates down to the H2 database's `RAWTOHEX` SQL function, and finally + // using that in a query to return a string.
+ import scalasql.core.SqlStr.SqlStringSyntax + + def rawToHex(v: Expr[String]): Expr[String] = Expr { implicit ctx => sql"RAWTOHEX($v)" } + + val query = City.select.filter(_.countryCode === "SGP").map(c => rawToHex(c.name)).single + + db.renderSql(query) ==> + "SELECT RAWTOHEX(city0.name) AS res FROM city city0 WHERE (city0.countrycode = ?)" + + db.run(query) ==> "00530069006e006700610070006f00720065" + // Your custom Scala functions can either be standalone functions or extension + // methods. Most of the operators on `Expr[T]` that ScalaSql comes bundled with + // are extension methods, with a different set being made available for each database. + // + // Different databases have a huge range of functions available. ScalaSql comes + // with the most commonly-used functions built in, but it is expected that you will + // need to build up your own library of custom `Expr[T]` functions to access + // less commonly used functions that are nonetheless still needed in your application + // -DOCS + } + test("customTypeMapper") { + // +DOCS + // ## Custom Type Mappings + // + // You can define custom `TypeMapper`s to support reading and writing values to + // the database which are of a type not supported by ScalaSql. The example below + // demonstrates how to define a custom `CityId` type, define an implicit `TypeMapper` + // for it, and then `INSERT` it into the database and `SELECT` it out after.
+ // + + case class CityId(value: Int) + + object CityId { + implicit def tm: TypeMapper[CityId] = new TypeMapper[CityId] { + def jdbcType: JDBCType = JDBCType.INTEGER + + def get(r: ResultSet, idx: Int): CityId = new CityId(r.getInt(idx)) + + def put(r: PreparedStatement, idx: Int, v: CityId): Unit = r.setInt(idx, v.value) + } + } + + // -DOCS + // Note sure why this is required, probably a Scalac bug + SqlStr.Interp.TypeInterp[CityId](CityId(1337)) + // +DOCS + + case class City( + id: CityId, + name: String, + countryCode: String, + district: String, + population: Long + ) + + object City extends SimpleTable[City] { + override def tableName: String = "city" + } + db.run( + City.insert.columns( + _.id := CityId(313373), + _.name := "test", + _.countryCode := "XYZ", + _.district := "district", + _.population := 1000000 + ) + ) + + db.run(City.select.filter(_.id === 313373).single) ==> + City(CityId(313373), "test", "XYZ", "district", 1000000) + // -DOCS + + // You can also use `TypeMapper#bimap` for the common case where you want the + // new `TypeMapper` to behave the same as an existing `TypeMapper`, just with + // conversion functions to convert back and forth between the old type and new type: + + case class CityId2(value: Int) + + object CityId2 { + implicit def tm: TypeMapper[CityId2] = TypeMapper[Int].bimap[CityId2]( + city => city.value, + int => CityId2(int) + ) + } + + // -DOCS + // Note sure why this is required, probably a Scalac bug + SqlStr.Interp.TypeInterp[CityId2](CityId2(1337)) + // +DOCS + case class City2( + id: CityId2, + name: String, + countryCode: String, + district: String, + population: Long + ) + + object City2 extends SimpleTable[City2] { + override def tableName: String = "city" + } + db.run( + City2.insert.columns( + _.id := CityId2(31337), + _.name := "test", + _.countryCode := "XYZ", + _.district := "district", + _.population := 1000000 + ) + ) + + db.run(City2.select.filter(_.id === 31337).single) ==> + City2(CityId2(31337), 
"test", "XYZ", "district", 1000000) + } + test("customTableColumnNames") { + // +DOCS + // ## Customizing Table and Column Names + // + // ScalaSql allows you to customize the table and column names via overriding + // `def tableName` and `def tableColumnNameOverride` on your `Table` object. + + case class CityCustom( + idCustom: Int, + nameCustom: String, + countryCodeCustom: String, + districtCustom: String, + populationCustom: Long + ) + + object CityCustom extends SimpleTable[CityCustom] { + + override def tableName: String = "city" + + override def tableColumnNameOverride(s: String): String = s match { + case "idCustom" => "id" + case "nameCustom" => "name" + case "countryCodeCustom" => "countrycode" + case "districtCustom" => "district" + case "populationCustom" => "population" + } + } + + val query = CityCustom.select + db.renderSql(query) ==> """ + SELECT + city0.id AS idcustom, + city0.name AS namecustom, + city0.countrycode AS countrycodecustom, + city0.district AS districtcustom, + city0.population AS populationcustom + FROM city city0 + """ + + db.run(query).take(3) ==> Seq( + CityCustom(1, "Kabul", "AFG", districtCustom = "Kabol", populationCustom = 1780000), + CityCustom( + 2, + "Qandahar", + "AFG", + districtCustom = "Qandahar", + populationCustom = 237500 + ), + CityCustom(3, "Herat", "AFG", districtCustom = "Herat", populationCustom = 186800) + ) + // -DOCS + } + } + +} diff --git a/scalasql/namedtuples/test/src/example/foo.scala b/scalasql/namedtuples/test/src/example/foo.scala new file mode 100644 index 00000000..49d337a6 --- /dev/null +++ b/scalasql/namedtuples/test/src/example/foo.scala @@ -0,0 +1,24 @@ +package scalasql.example + +// This file is a simple scratch-pad to demo ideas + +import scalasql.simple.{*, given} +import H2Dialect.* + +case class Person(name: String, age: Int) extends SimpleTable.Nested +object Person extends SimpleTable[Person]() + +case class City(name: String, population: Int, mayor: Person) +object City extends
SimpleTable[City]() + +def bar(db: DbApi) = + val m = db.run( + City.select.filter(_.name === "foo").map(c => (name = c.name, mayor = c.mayor)) + ) + val _: Seq[(name: String, mayor: Person)] = m // demonstrate that mayor maps back to case class. + +@main def foo = + City.select.filter(_.name === "foo").map(_.mayor) + City.insert.values(City("foo", 42, Person("bar", 23))) + City.insert.columns(_.name := "foo") + City.insert.batched(_.name, _.population, _.mayor.name)(("foo", 42, "bar"), ("baz", 23, "qux")) diff --git a/scalasql/query/src-3/TableMacro.scala b/scalasql/query/src-3/TableMacro.scala index 8149475a..dc572f2f 100644 --- a/scalasql/query/src-3/TableMacro.scala +++ b/scalasql/query/src-3/TableMacro.scala @@ -47,8 +47,7 @@ object TableMacros { paramTpe match { case AppliedType(tpeCtor, _) => tpeCtor.asType match { - case '[ - type t[_[_]]; t] => + case '[type t[_[_]]; t] => f[t]('{ summonInline[Table.ImplicitMetadata[t]].value }) } } diff --git a/scalasql/test/src/UtestFramework.scala b/scalasql/test/src/UtestFramework.scala index de1d809c..f2b7d5db 100644 --- a/scalasql/test/src/UtestFramework.scala +++ b/scalasql/test/src/UtestFramework.scala @@ -27,12 +27,15 @@ class UtestFramework extends utest.runner.Framework { override def teardown() = { println("Tearing down CustomFramework " + recordedTests.size) val workspaceRoot = os.Path(sys.env("MILL_WORKSPACE_ROOT")) + val recordedTestsFile = os.RelPath(sys.env("SCALASQL_RECORDED_TESTS_NAME")) + val recordedSuiteDescriptionsFile = + os.RelPath(sys.env("SCALASQL_RECORDED_SUITE_DESCRIPTIONS_NAME")) os.write.over( - workspaceRoot / "out" / "recordedTests.json", + workspaceRoot / "out" / recordedTestsFile, upickle.default.write(UtestFramework.recordedTests, indent = 4) ) os.write.over( - workspaceRoot / "out" / "recordedSuiteDescriptions.json", + workspaceRoot / "out" / recordedSuiteDescriptionsFile, upickle.default.write(UtestFramework.recordedSuiteDescriptions, indent = 4) ) recordedTests.clear() diff --git 
a/scalasql/test/src/WorldSqlTests.scala b/scalasql/test/src/WorldSqlTests.scala index 5433f709..621d2979 100644 --- a/scalasql/test/src/WorldSqlTests.scala +++ b/scalasql/test/src/WorldSqlTests.scala @@ -6,7 +6,7 @@ import java.sql.{JDBCType, PreparedStatement, ResultSet} object WorldSqlTests extends TestSuite { // +DOCS // - // This tutorials is a tour of how to use ScalaSql, from the most basic concepts + // This tutorial is a tour of how to use ScalaSql, from the most basic concepts // to writing some realistic queries. If you are browsing this on Github, you // can open the `Outline` pane on the right to browse the section headers to // see what we will cover and find anything specific of interest to you. @@ -42,8 +42,14 @@ object WorldSqlTests extends TestSuite { // ### Modeling Your Schema // // Next, you need to define your data model classes. In ScalaSql, your data model - // is defined using `case class`es with each field wrapped in the wrapper type - // parameter `T[_]`. This allows us to re-use the same case class to represent + // is defined using `case class`es with each field representing a column in an database table. + // + // There are two flavors to consider: `Table` (available for Scala 2.13+), and `SimpleTable` (Scala 3.7+). + // + // **Using `Table`** + // + // Declare your case class with a type parameter `T[_]`, which is used to wrap the type of each + // field. This allows us to re-use the same case class to represent // both database values (when `T` is `scalasql.Expr`) as well as Scala values // (when `T` is `scalasql.Sc`). // @@ -112,6 +118,27 @@ object WorldSqlTests extends TestSuite { } } } + + object NamedTupleDataTypes { + // +DOCS + // **Using `SimpleTable`** + // > Note: only available in the `com.lihaoyi::scalasql-namedtuples` library, which supports Scala 3.7.0+ + // + // Declare your case class as usual. Inside of queries, the class will be represented by a `Record` with the same fields, but wrapped in `scalasql.Expr`. 
+ // + // Here, we define three classes `Country` `City` and `CountryLanguage`, modeling + // the database tables we saw above. + // + // Also included is the necessary import statement to include the `SimpleTable` definition. + // + // ```scala + // +INCLUDE SNIPPET [IMPORTS] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // + // +INCLUDE SNIPPET [TABLES] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // ``` + // -DOCS + } + def tests = Tests { // +DOCS // ### Creating Your Database Client @@ -193,11 +220,16 @@ object WorldSqlTests extends TestSuite { City[Sc](2, "Qandahar", "AFG", district = "Qandahar", population = 237500), City[Sc](3, "Herat", "AFG", district = "Herat", population = 186800) ) - // Notice that `db.run` returns instances of type `City[Sc]`. `Sc` is `scalasql.Sc`, + // Notice that `db.run` returns instances of type `City[Sc]` (or `City` if using `SimpleTable`). + // + // `Sc` is `scalasql.Sc`, // short for the "Scala" type, representing a `City` object containing normal Scala // values. The `[Sc]` type parameter must be provided explicitly whenever creating, // type-annotating, or otherwise working with these `City` values. // + // > In this tutorial, unless otherwise specified, we will assume usage of the `Table` encoding. + // > If you are using `SimpleTable`, the same code will work, but drop `[Sc]` type arguments. + // // In this example, we do `.take(3)` after running the query to show only the first // 3 table entries for brevity, but by that point the `City.select` query had already // fetched the entire database table into memory. This can be a problem with non-trivial @@ -231,8 +263,12 @@ object WorldSqlTests extends TestSuite { db.run(query) ==> City[Sc](3208, "Singapore", "SGP", district = "", population = 4017733) // Note that we use `===` rather than `==` for the equality comparison. 
The // function literal passed to `.filter` is given a `City[Expr]` as its parameter, - // representing a `City` that is part of the database query, in contrast to the - // `City[Sc]`s that `db.run` returns , and so `_.name` is of type `Expr[String]` + // (or `Record[City, Expr]` with the `SimpleTable` encoding) representing a `City` + // that is part of the database query, in contrast to the + // `City[Sc]`s that `db.run` returns. + // + // Within a query therefore `_.name` is a field selection on the function parameter, + // resulting in `Expr[String]`, // rather than just `String` or `Sc[String]`. You can use your IDE's // auto-complete to see what operations are available on `Expr[String]`: typically // they will represent SQL string functions rather than Scala string functions and @@ -312,7 +348,8 @@ object WorldSqlTests extends TestSuite { City[Sc](1890, "Shanghai", "CHN", district = "Shanghai", population = 9696300), City[Sc](1891, "Peking", "CHN", district = "Peking", population = 7472000) ) - // Again, all the operations within the query work on `Expr`s: `c` is a `City[Expr]`, + // Again, all the operations within the query work on `Expr`s: + // `c` is a `City[Expr]` (or `Record[City, Expr]` for `SimpleTable`), // `c.population` is an `Expr[Int]`, `c.countryCode` is an `Expr[String]`, and // `===` and `>` and `&&` on `Expr`s all return `Expr[Boolean]`s that represent // a SQL expression that can be sent to the Database as part of your query. @@ -447,6 +484,35 @@ object WorldSqlTests extends TestSuite { "SINGAPORE", 4 // population in millions ) + // + // **Mapping with named tuples** + // > Note: only available in the `com.lihaoyi::scalasql-namedtuples` library, which supports Scala 3.7.0+ + // + // You can also use named tuples to map the results of a query. 
+ // ```scala + // +INCLUDE SNIPPET [MAP-1] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // + // +INCLUDE SNIPPET [MAP-2] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // ``` + // + // **Updating `Record` fields** + // > Note: only relevant when using the `SimpleTable` encoding. + // + // When using `SimpleTable`, within the `.map` query `c` is of type + // `Record[Country, Expr]`. Records are converted back to their associated case class + // (e.g. `Country`) with `db.run`. + // + // If you want to apply updates to any of the fields before returning, the `Record` class + // provides an `updates` method. This lets you provide an arbitrary sequence of updates to + // apply in-order to the record. You can either provide a value with `:=`, + // or provide a function that transforms the old value. For example: + // + // ```scala + // +INCLUDE SNIPPET [MAP-3] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // + // +INCLUDE SNIPPET [MAP-4] scalasql/namedtuples/test/src/example/WorldSqlTestsNamedTuple.scala + // ``` + // // -DOCS } } @@ -1290,7 +1356,9 @@ object WorldSqlTests extends TestSuite { throw new Exception() } - } catch { case e: Exception => /*do nothing*/ } + } catch { + case e: Exception => /*do nothing*/ + } dbClient.transaction { implicit db => db.run(City.select.filter(_.countryCode === "SGP").single) ==> @@ -1336,7 +1404,9 @@ object WorldSqlTests extends TestSuite { db.run(City.select.filter(_.countryCode === "SGP")) ==> Seq() throw new Exception() } - } catch { case e: Exception => /*do nothing*/ } + } catch { + case e: Exception => /*do nothing*/ + } db.run(City.select.filter(_.countryCode === "SGP").single) ==> City[Sc](3208, "Singapore", "SGP", district = "", population = 4017733) @@ -1443,7 +1513,6 @@ object WorldSqlTests extends TestSuite { db.run(City.select.filter(_.id === 313373).single) ==> City[Sc](CityId(313373), "test", "XYZ", "district", 1000000) - // -DOCS // You can also 
use `TypeMapper#bimap` for the common case where you want the // new `TypeMapper` to behave the same as an existing `TypeMapper`, just with @@ -1485,6 +1554,7 @@ object WorldSqlTests extends TestSuite { db.run(City2.select.filter(_.id === 31337).single) ==> City2[Sc](CityId2(31337), "test", "XYZ", "district", 1000000) + // -DOCS } test("customTableColumnNames") { // +DOCS diff --git a/scalasql/test/src/api/TransactionTests.scala b/scalasql/test/src/api/TransactionTests.scala index a9c9902d..ec71e1c9 100644 --- a/scalasql/test/src/api/TransactionTests.scala +++ b/scalasql/test/src/api/TransactionTests.scala @@ -134,7 +134,9 @@ trait TransactionTests extends ScalaSqlSuite { throw new FooException } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } dbClient.transaction(_.run(Purchase.select.size)) ==> 7 } @@ -324,7 +326,9 @@ trait TransactionTests extends ScalaSqlSuite { db.run(Purchase.select.size) ==> 1 throw new FooException } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } db.run(Purchase.select.size) ==> 3 } @@ -355,7 +359,9 @@ trait TransactionTests extends ScalaSqlSuite { db.run(Purchase.select.size) ==> 1 throw new FooException } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } db.run(Purchase.select.size) ==> 5 } @@ -381,7 +387,9 @@ trait TransactionTests extends ScalaSqlSuite { throw new FooException } } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } db.run(Purchase.select.size) ==> 5 } @@ -409,7 +417,9 @@ trait TransactionTests extends ScalaSqlSuite { throw new FooException } } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } dbClient.transaction(_.run(Purchase.select.size)) ==> 7 } @@ -435,7 +445,9 @@ trait TransactionTests extends ScalaSqlSuite { 
db.run(Purchase.select.size) ==> 5 } - } catch { case e: FooException => /*donothing*/ } + } catch { + case e: FooException => /*donothing*/ + } dbClient.transaction(_.run(Purchase.select.size)) ==> 7 }