diff --git a/README.md b/README.md index bae7c582..64ae04f6 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # SingleStoreDB Spark Connector -## Version: 4.1.7 [![License](http://img.shields.io/:license-Apache%202-brightgreen.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) +## Version: 4.1.8-beta [![License](http://img.shields.io/:license-Apache%202-brightgreen.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) ## Getting Started @@ -13,13 +13,13 @@ spark-packages.org. The group is `com.singlestore` and the artifact is You can add the connector to your Spark application using: spark-shell, pyspark, or spark-submit ``` -$SPARK_HOME/bin/spark-shell --packages com.singlestore:singlestore-spark-connector_2.12:4.1.7-spark-3.5.0 +$SPARK_HOME/bin/spark-shell --packages com.singlestore:singlestore-spark-connector_2.12:4.1.8-beta-spark-3.5.0 ``` We release multiple versions of the `singlestore-spark-connector`, one for each supported Spark version. The connector follows the `x.x.x-spark-y.y.y` naming convention, where `x.x.x` represents the connector version and `y.y.y` represents the corresponding Spark version. -For example, in connector `4.1.7-spark-3.5.0`, 4.1.7 is the version of the connector, +For example, in connector `4.1.8-beta-spark-3.5.0`, 4.1.8-beta is the version of the connector, compiled and tested against Spark version 3.5.0. It is critical to select the connector version that corresponds to the Spark version in use. 
diff --git a/build.sbt b/build.sbt index d6ec8c96..68737e38 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import xerial.sbt.Sonatype._ To run tests or publish with a specific spark version use this java option: -Dspark.version=3.0.0 */ -val sparkVersion = sys.props.get("spark.version").getOrElse("3.5.0") +val sparkVersion = sys.props.get("spark.version").getOrElse("3.5.0") val scalaVersionStr = "2.12.12" val scalaVersionPrefix = scalaVersionStr.substring(0, 4) val jacksonDatabindVersion = sparkVersion match { @@ -30,7 +30,7 @@ lazy val root = project case "3.4.2" => "scala-sparkv3.4" case "3.5.0" => "scala-sparkv3.5" }), - version := s"4.1.7-spark-${sparkVersion}", + version := s"4.1.8-beta-spark-${sparkVersion}", licenses += "Apache-2.0" -> url( "http://opensource.org/licenses/Apache-2.0" ), diff --git a/demo/notebook/pyspark-singlestore-demo_2F8XQUKFG.zpln b/demo/notebook/pyspark-singlestore-demo_2F8XQUKFG.zpln index 8a66c516..3d68595e 100644 --- a/demo/notebook/pyspark-singlestore-demo_2F8XQUKFG.zpln +++ b/demo/notebook/pyspark-singlestore-demo_2F8XQUKFG.zpln @@ -45,7 +45,7 @@ }, { "title": "Configure Spark", - "text": "%spark.conf\n\n// Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.7-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: ddlEndpoint)\n// 
Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", + "text": "%spark.conf\n\n// Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.8-beta-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: ddlEndpoint)\n// Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", "user": "anonymous", "dateUpdated": "2022-07-06 11:26:15.232", "progress": 0, diff --git a/demo/notebook/scala-singlestore-demo_2F6Y3APTX.zpln b/demo/notebook/scala-singlestore-demo_2F6Y3APTX.zpln index 3855ba97..f3d36781 100644 --- a/demo/notebook/scala-singlestore-demo_2F6Y3APTX.zpln +++ b/demo/notebook/scala-singlestore-demo_2F6Y3APTX.zpln @@ -45,7 +45,7 @@ }, { "title": "Configure Spark", - "text": "%spark.conf\n\n// Comma-separated list of 
Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.7-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: ddlEndpoint)\n// Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", + "text": "%spark.conf\n\n// Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.8-beta-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: 
ddlEndpoint)\n// Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", "user": "anonymous", "dateUpdated": "2022-07-06 11:31:08.311", "progress": 0, diff --git a/demo/notebook/spark-sql-singlestore-demo_2F7PZ81H6.zpln b/demo/notebook/spark-sql-singlestore-demo_2F7PZ81H6.zpln index 8c74a1df..31c7e865 100644 --- a/demo/notebook/spark-sql-singlestore-demo_2F7PZ81H6.zpln +++ b/demo/notebook/spark-sql-singlestore-demo_2F7PZ81H6.zpln @@ -45,7 +45,7 @@ }, { "title": "Configure Spark", - "text": "%spark.conf\n\n// Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.7-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: ddlEndpoint)\n// Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", + "text": 
"%spark.conf\n\n// Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths\nspark.jars.packages com.singlestore:singlestore-spark-connector_2.12:4.1.8-beta-spark-3.5.0\n\n// The hostname or IP address of the SingleStore Master Aggregator in the `host[:port]` format, where port is an optional parameter\n// singlestore-ciab-for-zeppelin - hostname of the docker created by https://hub.docker.com/r/singlestore/cluster-in-a-box\n// 3306 - port on which SingleStore Master Aggregator is started\nspark.datasource.singlestore.ddlEndpoint singlestore-ciab-for-zeppelin:3306\n\n// The hostname or IP address of SingleStore Aggregator nodes to run queries against in the `host[:port],host[:port],...` format, \n// where :port is an optional parameter (multiple hosts separated by comma) (default: ddlEndpoint)\n// Example\n// spark.datasource.singlestore.dmlEndpoints child-agg:3308,child-agg2\nspark.datasource.singlestore.dmlEndpoints singlestore-ciab-for-zeppelin:3306\n\n// SingleStore username (default: root)\nspark.datasource.singlestore.user root\n\n// SingleStore password (default: no password)\nspark.datasource.singlestore.password my_password", "user": "anonymous", "dateUpdated": "2022-07-06 11:32:22.885", "progress": 0,