Skip to content

Core/Scala3 #54

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 19 commits into from
Apr 11, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions .github/workflows/bigquery-integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,12 @@ jobs:
service_account_key: ${{ secrets.BQ_SERVICE_ACCOUNT_64 }}
export_default_credentials: true
- name: Integration Tests
run: sbt -v -Dfile.encoding=UTF-8 coverage +it:test
- name: Coverage Report
run: sbt -v -Dfile.encoding=UTF-8 coverageReport
- name: Codecov
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: false # optional (default = false)
flags: bq-integration
run: sbt -v -Dfile.encoding=UTF-8 +it:test
#- name: Coverage Report
# run: sbt -v -Dfile.encoding=UTF-8 coverageReport
#- name: Codecov
# uses: codecov/codecov-action@v1
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# verbose: false # optional (default = false)
# flags: bq-integration
18 changes: 9 additions & 9 deletions .github/workflows/ci-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,12 @@ jobs:
with:
java-version: "[email protected]"
- name: Build and Unit Tests
run: sbt -v -Dfile.encoding=UTF-8 +clean coverage +test
- name: Coverage Report
run: sbt -v -Dfile.encoding=UTF-8 coverageReport
- name: Codecov
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: false # optional (default = false)
flags: unittests
run: sbt -v -Dfile.encoding=UTF-8 +clean +test
#- name: Coverage Report
# run: sbt -v -Dfile.encoding=UTF-8 coverageReport
#- name: Codecov
# uses: codecov/codecov-action@v1
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# verbose: false # optional (default = false)
# flags: unittests
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
package org.datatools.bigdatatypes.bigquery

import com.google.cloud.bigquery.Field
import org.datatools.bigdatatypes.basictypes.SqlType
import org.datatools.bigdatatypes.conversions.SqlInstanceConversion
import org.datatools.bigdatatypes.formats.Formats
import org.datatools.bigdatatypes.types.basic.SqlType

/** Type class to convert generic SqlTypes received as instance into BigQuery specific fields
* This uses [[SqlTypeToBigQuery]] to create BigQuery Fields
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,9 @@ package org.datatools.bigdatatypes.bigquery

import com.google.cloud.bigquery.Field.Mode
import com.google.cloud.bigquery.{Field, StandardSQLTypeName}
import org.datatools.bigdatatypes.basictypes._
import org.datatools.bigdatatypes.conversions._
import org.datatools.bigdatatypes.formats.Formats
import org.datatools.bigdatatypes.types.basic
import org.datatools.bigdatatypes.types.basic._

/** Type class to convert generic SqlTypes into BigQuery specific fields
* In BigQuery, a table is made with a List of fields so as an example:
Expand Down Expand Up @@ -41,7 +40,7 @@ object SqlTypeToBigQuery {
def getSchema(sqlType: SqlType)(implicit f: Formats): List[Field] = sqlType match {
case SqlStruct(Nil, _) => Nil
case SqlStruct((name, sqlType) :: records, mode) =>
getSchemaWithName(f.transformKey(name, sqlType), sqlType) :: getSchema(basic.SqlStruct(records, mode))
getSchemaWithName(f.transformKey(name, sqlType), sqlType) :: getSchema(SqlStruct(records, mode))
}

/** Basic SqlTypes conversions to BigQuery Fields
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@ package org.datatools.bigdatatypes.bigquery

import com.google.cloud.bigquery.Field
import org.datatools.bigdatatypes.TestTypes._
import org.datatools.bigdatatypes.basictypes.SqlType
import org.datatools.bigdatatypes.bigquery.SqlInstanceToBigQuery.InstanceSyntax
import org.datatools.bigdatatypes.conversions.SqlTypeConversion
import org.datatools.bigdatatypes.conversions.SqlTypeConversion._
import org.datatools.bigdatatypes.formats.Formats.implicitDefaultFormats
import org.datatools.bigdatatypes.types.basic.SqlType
import org.datatools.bigdatatypes.{BigQueryTestTypes, UnitSpec}

/** These tests defines how to convert an SqlType instance into BigQueryFields
Expand Down
60 changes: 44 additions & 16 deletions build.sbt
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
//used to build Sonatype releases
lazy val versionNumber = "0.3.1"
lazy val versionNumber = "0.3.2"
lazy val projectName = "big-data-types"
version := versionNumber
name := projectName

lazy val scala213 = "2.13.5"
lazy val scala212 = "2.12.12"
lazy val scala211 = "2.11.12"
lazy val supportedScalaVersions = List(scala213, scala212)
scalaVersion := scala212
lazy val scala3 = "3.0.0-RC2"
lazy val supportedScalaVersions = List(scala3, scala213, scala212)
scalaVersion := scala213

assemblyMergeStrategy in assembly := {
assembly / assemblyMergeStrategy := {
case PathList("META-INF", xs @ _*) => MergeStrategy.discard
case x => MergeStrategy.first
}
Expand All @@ -29,19 +29,27 @@ lazy val publishSettings = Seq(
licenses := Seq("APL2" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt")),
publishMavenStyle := true
)
lazy val noPublishSettings =
skip in publish := true

lazy val noPublishSettings = {
publish / skip := true
}

publishSettings

//Dependencies
lazy val coreDependencies = Seq(
lazy val coreDependencies2 = Seq(
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.clapper" %% "grizzled-slf4j" % "1.3.4",
"com.chuusai" %% "shapeless" % "2.3.3",
scalatest % Test
)

lazy val coreDependencies3 = Seq(
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.clapper" % "grizzled-slf4j_2.13" % "1.3.4",
scalatest % Test
)

lazy val bigqueryDependencies = Seq(
"com.google.cloud" % "google-cloud-bigquery" % "1.127.11",
scalatest % "it,test"
Expand All @@ -57,7 +65,7 @@ lazy val scalatest = "org.scalatest" %% "scalatest" % "3.2.7"
//Project settings
lazy val root = (project in file("."))
.configs(IntegrationTest)
.settings(noPublishSettings)
.settings(noPublishSettings, crossScalaVersions := Nil)
.aggregate(
core,
bigquery,
Expand All @@ -69,8 +77,15 @@ lazy val core = (project in file("core")).settings(
name := projectName + "-core",
publishSettings,
crossScalaVersions := supportedScalaVersions,
crossVersionSharedSources,
libraryDependencies ++= coreDependencies
crossVersionSharedSourcesScala3, //different one for Scala 2 or 3
//for Scala 2 or 3
libraryDependencies ++= {
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, _)) => coreDependencies2
case Some((3, _)) => coreDependencies3
case _ => Nil
}
}
)

lazy val bigquery = (project in file("bigquery"))
Expand All @@ -79,7 +94,7 @@ lazy val bigquery = (project in file("bigquery"))
name := projectName + "-bigquery",
publishSettings,
Defaults.itSettings,
crossScalaVersions := supportedScalaVersions,
crossScalaVersions := List(scala212, scala213),
crossVersionSharedSources,
libraryDependencies ++= bigqueryDependencies
)
Expand Down Expand Up @@ -112,18 +127,31 @@ lazy val examples = (project in file("examples"))
)
.dependsOn(spark % "test->test;compile->compile")



lazy val crossVersionSharedSources: Seq[Setting[_]] =
Seq(Compile, Test).map { sc =>
(unmanagedSourceDirectories in sc) ++= {
(unmanagedSourceDirectories in sc).value.flatMap { dir: File =>
(sc / unmanagedSourceDirectories) ++= {
(sc / unmanagedSourceDirectories).value.flatMap { dir: File =>
if (dir.getName != "scala") Seq(dir)
else
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((3, _)) => Seq(new File(dir.getPath + "_3"))
case Some((2, y)) if y >= 13 => Seq(new File(dir.getPath + "_2.13+"))
case Some((2, y)) if y >= 11 => Seq(new File(dir.getPath + "_2.13-"))
}
}
}
}

/** Maps each "scala" source directory to a version-specific sibling
 * (src/main/scala_2 or src/main/scala_3) so shared code can differ between Scala 2 and Scala 3.
 * Non-"scala" directories are passed through unchanged.
 * An unrecognized or unparseable Scala version contributes no extra directory
 * instead of failing the whole build load with a MatchError (the original match was non-exhaustive).
 */
lazy val crossVersionSharedSourcesScala3: Seq[Setting[_]] =
  Seq(Compile, Test).map { sc =>
    (sc / unmanagedSourceDirectories) ++= {
      (sc / unmanagedSourceDirectories).value.flatMap { dir: File =>
        if (dir.getName != "scala") Seq(dir)
        else
          CrossVersion.partialVersion(scalaVersion.value) match {
            case Some((3, _)) => Seq(new File(dir.getPath + "_3"))
            case Some((2, _)) => Seq(new File(dir.getPath + "_2"))
            case _            => Seq.empty // unknown Scala version: add no version-specific sources
          }
      }
    }
  }
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package org.datatools.bigdatatypes.types.basic
package org.datatools.bigdatatypes.basictypes

/** Abstract representation of the type of a generic SQL database */
sealed trait SqlType {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package org.datatools.bigdatatypes.types.basic
package org.datatools.bigdatatypes.basictypes

/** The mode of a sql type. e.g: Required, Nullable, Repeated.
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.datatools.bigdatatypes.conversions

import org.datatools.bigdatatypes.types.basic._
import org.datatools.bigdatatypes.basictypes.SqlType

/** Type class to convert instances into [[SqlType]]
*
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
package org.datatools.bigdatatypes.conversions

import java.sql.{Date, Timestamp}
import org.datatools.bigdatatypes.basictypes._

import org.datatools.bigdatatypes.types.basic._
import java.sql.{Date, Timestamp}
import shapeless._
import shapeless.labelled.FieldType

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.datatools.bigdatatypes.formats

import org.datatools.bigdatatypes.types.basic.SqlType
import org.datatools.bigdatatypes.basictypes.SqlType

trait Formats {

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.datatools.bigdatatypes.formats

import org.datatools.bigdatatypes.types.basic.{SqlBool, SqlDate, SqlTimestamp, SqlType}
import org.datatools.bigdatatypes.basictypes._

/** Converts CamelCase field names to snake_case
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.datatools.bigdatatypes.formats

import org.datatools.bigdatatypes.types.basic.SqlType
import org.datatools.bigdatatypes.basictypes.SqlType

/** Converts CamelCase field names to snake_case
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
package org.datatools.bigdatatypes.basictypes

import org.datatools.bigdatatypes.basictypes.SqlType.{SqlBool, SqlDate, SqlDecimal, SqlDouble, SqlFloat, SqlInt, SqlLong, SqlString, SqlStruct, SqlTimestamp}
import org.datatools.bigdatatypes.basictypes.SqlTypeMode._

/** Abstract representation of the type of a generic SQL database */
enum SqlType {

  /** The [[SqlTypeMode]] (Required, Nullable or Repeated) attached to this type
   */
  def mode: SqlTypeMode

  /** Returns a copy of this type carrying the given mode, when the promotion makes sense. e.g:
   * [[List[Option[String] ] ]] and [[Option[List[String] ] ]] should both be SqlString(Repeated)
   *
   * @param mode the mode we want to convert to
   * @return a new [[SqlType]] with the given mode, or `this` unchanged when the conversion is invalid
   */
  def changeMode(mode: SqlTypeMode): SqlType =
    if (!this.mode.isValidConversion(mode)) this // invalid promotion: keep the current type untouched
    else
      this match {
        case SqlInt(_)             => SqlInt(mode)
        case SqlLong(_)            => SqlLong(mode)
        case SqlFloat(_)           => SqlFloat(mode)
        case SqlDouble(_)          => SqlDouble(mode)
        case SqlDecimal(_)         => SqlDecimal(mode)
        case SqlBool(_)            => SqlBool(mode)
        case SqlString(_)          => SqlString(mode)
        case SqlTimestamp(_)       => SqlTimestamp(mode)
        case SqlDate(_)            => SqlDate(mode)
        case SqlStruct(records, _) => SqlStruct(records, mode)
      }

  // Leaf types: each case carries only its mode (declaration order fixed — ordinals are observable)
  case SqlInt(mode: SqlTypeMode = Required)
  case SqlLong(mode: SqlTypeMode = Required)
  case SqlFloat(mode: SqlTypeMode = Required)
  case SqlDouble(mode: SqlTypeMode = Required)
  case SqlDecimal(mode: SqlTypeMode = Required)
  case SqlBool(mode: SqlTypeMode = Required)
  case SqlString(mode: SqlTypeMode = Required)
  case SqlTimestamp(mode: SqlTypeMode = Required)
  case SqlDate(mode: SqlTypeMode = Required)

  // Nested type: an ordered list of (field name, field type) pairs plus its own mode
  case SqlStruct(records: List[(String, SqlType)], mode: SqlTypeMode = Required)
}


Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package org.datatools.bigdatatypes.basictypes

import org.datatools.bigdatatypes.basictypes.SqlTypeMode.{Nullable, Repeated, Required}

/** The mode of a sql type. e.g: Required, Nullable, Repeated.
 */
enum SqlTypeMode {

  /** Tells you if you can change the mode for another.
   *
   * e.g:
   * [[Option[String] ]] should be SqlString(Nullable)
   * [[List[Option[String] ] ]] should be SqlString(Repeated).
   *
   * @param newMode the mode we want to convert to
   * @return true if this conversion makes sense, false if not
   */
  def isValidConversion(newMode: SqlTypeMode): Boolean =
    this match {
      case SqlTypeMode.Repeated => false                              // a repeated field never changes mode
      case SqlTypeMode.Nullable => newMode != SqlTypeMode.Required    // nullable can never become mandatory
      case SqlTypeMode.Required => true                               // a required field may relax to any mode
    }

  /** Nullable field */
  case Nullable

  /** Repeated or array field */
  case Repeated

  /** Mandatory field */
  case Required
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package org.datatools.bigdatatypes.conversions

import org.datatools.bigdatatypes.basictypes.SqlType

/** Type class that turns an instance of [[A]] into its generic [[SqlType]] representation
 *
 * @tparam A is a Scala type
 */
trait SqlInstanceConversion[-A] {

  /** Converts a single value into the generic SQL type tree
   *
   * @param value an instance that implements SqlInstanceConversion
   * @return the [[SqlType]] representation of [[A]]
   */
  def getType(value: A): SqlType
}

object SqlInstanceConversion {

  /** Summoner method: `SqlInstanceConversion[A]` resolves the instance available in implicit scope
   */
  def apply[A](using a: SqlInstanceConversion[A]): SqlInstanceConversion[A] = summon[SqlInstanceConversion[A]]

}

Loading