From 9143a0be74928b1059392f23dacc7a64654240d4 Mon Sep 17 00:00:00 2001
From: kbuilder
Date: Mon, 29 Jul 2024 09:07:58 -0700
Subject: [PATCH] Release 0.2.0.

---
 CHANGES.md                                            | 2 +-
 README.md                                             | 8 ++++----
 pom.xml                                               | 2 +-
 spark-bigtable_2.12-it/pom.xml                        | 6 +++---
 spark-bigtable_2.12/pom.xml                           | 4 ++--
 .../cloud/spark/bigtable/BigtableDefaultSource.scala  | 2 +-
 6 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index 13e2d91..dde0182 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,6 @@
 # Release Notes
 
-## Next
+## 0.2.0 - 2024-07-29
 
 * PR #33: Add support for more primitive types, i.e., Int, Float, etc.
 
diff --git a/README.md b/README.md
index 185bfef..92a34b9 100644
--- a/README.md
+++ b/README.md
@@ -18,7 +18,7 @@ You can access the connector in two different ways:
 In Java and Scala applications, you can use different dependency management
 tools (e.g., Maven, sbt, or Gradle) to access the connector
 `com.google.cloud.spark.bigtable:spark-bigtable_2.12:<connector-version>` (
-current `<connector-version>` is `0.1.1`) and package it inside your application JAR using
+current `<connector-version>` is `0.2.0`) and package it inside your application JAR using
 libraries such as Maven Shade Plugin. For PySpark applications, you can use
 the `--jars` flag to pass the GCS address of the connector when submitting it.
 
@@ -28,14 +28,14 @@ For Maven, you can add the following snippet to your `pom.xml` file:
 <dependency>
   <groupId>com.google.cloud.spark.bigtable</groupId>
   <artifactId>spark-bigtable_2.12</artifactId>
-  <version>0.1.1</version>
+  <version>0.2.0</version>
 </dependency>
 ```
 
 For sbt, you can add the following to your `build.sbt` file:
 
 ```
-libraryDependencies += "com.google.cloud.spark.bigtable" % "spark-bigtable_2.12" % "0.1.1"
+libraryDependencies += "com.google.cloud.spark.bigtable" % "spark-bigtable_2.12" % "0.2.0"
 ```
 
 Finally, you can add the following to your `build.gradle` file when using
@@ -43,7 +43,7 @@ Gradle:
 
 ```
 dependencies {
-implementation group: 'com.google.cloud.bigtable', name: 'spark-bigtable_2.12', version: '0.1.1'
+implementation group: 'com.google.cloud.bigtable', name: 'spark-bigtable_2.12', version: '0.2.0'
 }
 ```
 
diff --git a/pom.xml b/pom.xml
index 2bf481b..f39db15 100644
--- a/pom.xml
+++ b/pom.xml
@@ -21,7 +21,7 @@
   <groupId>com.google.cloud.spark.bigtable</groupId>
   <artifactId>spark-bigtable-connector</artifactId>
   <packaging>pom</packaging>
-  <version>0.1.1</version>
+  <version>0.2.0</version>
   <name>Spark Bigtable Connector Build Parent</name>
   <description>Parent project for all the Spark Bigtable Connector artifacts</description>
   <url>https://github.com/GoogleCloudDataproc/spark-bigtable-connector</url>
diff --git a/spark-bigtable_2.12-it/pom.xml b/spark-bigtable_2.12-it/pom.xml
index 219e554..7c8e48b 100644
--- a/spark-bigtable_2.12-it/pom.xml
+++ b/spark-bigtable_2.12-it/pom.xml
@@ -21,14 +21,14 @@
   <parent>
     <groupId>com.google.cloud.spark.bigtable</groupId>
     <artifactId>spark-bigtable-connector</artifactId>
-    <version>0.1.1</version>
+    <version>0.2.0</version>
     <relativePath>../</relativePath>
   </parent>
 
   <groupId>com.google.cloud.spark.bigtable</groupId>
   <artifactId>spark-bigtable_2.12-it</artifactId>
   <name>Google Bigtable - Spark Connector Integration Tests</name>
-  <version>0.1.1</version>
+  <version>0.2.0</version>
@@ -52,7 +52,7 @@
     <dependency>
       <groupId>com.google.cloud.spark.bigtable</groupId>
       <artifactId>spark-bigtable_2.12</artifactId>
-      <version>0.1.1</version>
+      <version>0.2.0</version>
     </dependency>
diff --git a/spark-bigtable_2.12/pom.xml b/spark-bigtable_2.12/pom.xml
index 0e8db20..2392d67 100644
--- a/spark-bigtable_2.12/pom.xml
+++ b/spark-bigtable_2.12/pom.xml
@@ -21,14 +21,14 @@
   <parent>
     <groupId>com.google.cloud.spark.bigtable</groupId>
     <artifactId>spark-bigtable-connector</artifactId>
-    <version>0.1.1</version>
+    <version>0.2.0</version>
     <relativePath>../</relativePath>
   </parent>
 
   <groupId>com.google.cloud.spark.bigtable</groupId>
   <artifactId>spark-bigtable_2.12</artifactId>
   <name>Google Bigtable - Apache Spark Connector</name>
-  <version>0.1.1</version>
+  <version>0.2.0</version>
diff --git a/spark-bigtable_2.12/src/main/scala/com/google/cloud/spark/bigtable/BigtableDefaultSource.scala b/spark-bigtable_2.12/src/main/scala/com/google/cloud/spark/bigtable/BigtableDefaultSource.scala
index 6b2066f..e7b0ebe 100644
--- a/spark-bigtable_2.12/src/main/scala/com/google/cloud/spark/bigtable/BigtableDefaultSource.scala
+++ b/spark-bigtable_2.12/src/main/scala/com/google/cloud/spark/bigtable/BigtableDefaultSource.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode, Row => SparkRow}
 import org.apache.yetus.audience.InterfaceAudience
 
 object VersionInformation {
-  val CONNECTOR_VERSION = "0.1.1" // ${NEXT_VERSION_FLAG}
+  val CONNECTOR_VERSION = "0.2.0" // ${NEXT_VERSION_FLAG}
   val DATA_SOURCE_VERSION = "V1"
   val scalaVersion = util.Properties.versionNumberString
   // This remains unset only in unit tests where sqlContext is null.
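
For context on consuming the artifact released by this patch, here is a minimal Scala sketch of reading a Bigtable table through the connector. The `bigtable` format name and the `catalog`, `spark.bigtable.project.id`, and `spark.bigtable.instance.id` option keys follow the connector's documented usage but are not part of this patch; the project, instance, table, and column names are placeholder assumptions.

```
import org.apache.spark.sql.SparkSession

object BigtableReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("bigtable-read-sketch").getOrCreate()

    // Placeholder catalog: maps the row key and one column family/qualifier
    // of a hypothetical table "my_table" to DataFrame columns.
    val catalog =
      """{
        |  "table": {"name": "my_table"},
        |  "rowkey": "id",
        |  "columns": {
        |    "id": {"cf": "rowkey", "col": "id", "type": "string"},
        |    "name": {"cf": "info", "col": "name", "type": "string"}
        |  }
        |}""".stripMargin

    // Assumed data source name and option keys; verify against the
    // connector documentation for the release you depend on (0.2.0 here).
    val df = spark.read
      .format("bigtable")
      .option("catalog", catalog)
      .option("spark.bigtable.project.id", "my-gcp-project")
      .option("spark.bigtable.instance.id", "my-bigtable-instance")
      .load()

    df.show()
    spark.stop()
  }
}
```

A PySpark job would instead pass the connector JAR's GCS address via `--jars` at submission time, as the README text updated above mentions.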