From 69aebb364d416b17ff581cea49e1ed81669a921c Mon Sep 17 00:00:00 2001
From: thekingofcity
Date: Mon, 15 Mar 2021 14:02:36 +0800
Subject: [PATCH 1/5] [Issue-461] Remove Pravega submodule (#470)

Signed-off-by: zhongle.wang
---
 .gitmodules                               |  4 ----
 documentation/src/docs/getting-started.md |  4 ++--
 gradle.properties                         |  3 ---
 pravega                                   |  1 -
 settings.gradle                           | 26 -----------------------
 5 files changed, 2 insertions(+), 36 deletions(-)
 delete mode 100644 .gitmodules
 delete mode 160000 pravega

diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 654b561f..00000000
--- a/.gitmodules
+++ /dev/null
@@ -1,4 +0,0 @@
-[submodule "pravega"]
-	path = pravega
-	url = https://github.com/pravega/pravega.git
-	branch = master
diff --git a/documentation/src/docs/getting-started.md b/documentation/src/docs/getting-started.md
index 9188cc74..fad69881 100644
--- a/documentation/src/docs/getting-started.md
+++ b/documentation/src/docs/getting-started.md
@@ -28,12 +28,12 @@ The connectors can be used to build end-to-end stream processing pipelines (see
 
 Building the connectors from the source is only necessary when we want to use or contribute to the latest (*unreleased*) version of the Pravega Flink connectors.
 
-The connector project is linked to a specific version of Pravega, based on a [git submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules) pointing to a commit-id. By default the sub-module option is disabled and the build step will make use of the Pravega version defined in the `gradle.properties` file. You could override this option by enabling `usePravegaVersionSubModule` flag in `gradle.properties` to `true`.
+The connector project is linked to a specific version of Pravega, defined by the `pravegaVersion` property in the `gradle.properties` file.
 
 Checkout the source code repository by following below steps:
 
 ```
-git clone --recursive https://github.com/pravega/flink-connectors.git
+git clone https://github.com/pravega/flink-connectors.git
 ```
 
 After cloning the repository, the project can be built by running the below command in the project root directory `flink-connectors`. 
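+
+Since the Pravega version now comes solely from `gradle.properties`, the build can be pointed at a different (published) Pravega release by overriding the `pravegaVersion` property on the command line. A minimal sketch (the version shown is only an illustration; an artifact for it must exist in the configured repositories):
+
+```
+./gradlew clean build -PpravegaVersion=0.9.0
+```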
diff --git a/gradle.properties b/gradle.properties index 3e218a43..4467e997 100644 --- a/gradle.properties +++ b/gradle.properties @@ -34,9 +34,6 @@ pravegaVersion=0.10.0-2815.7461614-SNAPSHOT schemaRegistryVersion=0.2.0-50.7d32981-SNAPSHOT apacheCommonsVersion=3.7 -# flag to indicate if Pravega sub-module should be used instead of the version defined in 'pravegaVersion' -usePravegaVersionSubModule=false - # These properties are only needed for publishing to maven central # Pravega Signing Key signing.keyId= diff --git a/pravega b/pravega deleted file mode 160000 index 74616144..00000000 --- a/pravega +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 746161442fcc6dad18154d378af6ca5680dc1155 diff --git a/settings.gradle b/settings.gradle index 049a5c87..702c8907 100644 --- a/settings.gradle +++ b/settings.gradle @@ -19,29 +19,3 @@ pluginManagement { } rootProject.name='flink-connectors' - - - -includeBuild('pravega') { - if (usePravegaVersionSubModule.toBoolean()) { - dependencySubstitution { - substitute module('io.pravega:pravega-client') with project(':client') - substitute module('io.pravega:pravega-common') with project(':common') - substitute module('io.pravega:pravega-controller') with project(':controller') - substitute module('io.pravega:pravega-segmentstore') with project(':segmentstore') - substitute module('io.pravega:pravega-segmentstore-contracts') with project(':segmentstore:contracts') - substitute module('io.pravega:pravega-segmentstore-server') with project(':segmentstore:server') - substitute module('io.pravega:pravega-segmentstore-server-host') with project(':segmentstore:server:host') - substitute module('io.pravega:pravega-segmentstore-storage') with project(':segmentstore:storage') - substitute module('io.pravega:pravega-segmentstore-storage-impl') with project(':segmentstore:storage:impl') - substitute module('io.pravega:pravega-shared') with project(':shared') - substitute module('io.pravega:pravega-shared-controller-api') with project(':shared:controller-api') - substitute module('io.pravega:pravega-shared-metrics') with project(':shared:metrics') - substitute module('io.pravega:pravega-shared-protocol') with project(':shared:protocol') - substitute module('io.pravega:pravega-standalone') with project(':standalone') - - substitute module('io.pravega:pravega-segmentstore-storage-impl') with project(':segmentstore:storage:impl') - substitute module('io.pravega:pravega-authplugin') with project(':shared:authplugin') - } - } -} From cd572e4b53b70ba07f9bb8f7ad558ed2ae0248ee Mon Sep 17 00:00:00 2001 From: Brian Zhou Date: Thu, 18 Mar 2021 09:23:32 +0800 Subject: [PATCH 2/5] [issue-435] Upgrade to Flink 1.12 (#468) Signed-off-by: Brian Zhou --- gradle.properties | 2 +- .../flink/FlinkPravegaTableSink.java | 2 +- .../flink/FlinkPravegaTableSource.java | 2 +- .../FlinkPravegaReaderRGStateITCase.java | 2 + .../flink/FlinkPravegaTableSinkTest.java | 10 ++++- .../FlinkPravegaDynamicTableFactoryTest.java | 38 ++++++++++++------- .../table/FlinkPravegaDynamicTableITCase.java | 6 +-- 7 files changed, 39 insertions(+), 23 deletions(-) diff --git a/gradle.properties b/gradle.properties index 4467e997..c86c542e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -10,7 +10,7 @@ # 3rd party Versions. 
 checkstyleToolVersion=7.1
-flinkVersion=1.11.2
+flinkVersion=1.12.2
 flinkScalaVersion=2.12
 jacksonVersion=2.8.9
 lombokVersion=1.18.4
diff --git a/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSink.java b/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSink.java
index 7f89775d..71bb7c99 100644
--- a/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSink.java
+++ b/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSink.java
@@ -64,7 +64,7 @@ protected FlinkPravegaTableSink(Function> w
                       TableSchema schema) {
         this.writerFactory = Preconditions.checkNotNull(writerFactory, "writerFactory");
         this.outputFormatFactory = Preconditions.checkNotNull(outputFormatFactory, "outputFormatFactory");
-        this.schema = TableSchemaUtils.checkNoGeneratedColumns(schema);
+        this.schema = TableSchemaUtils.checkOnlyPhysicalColumns(schema);
     }
 
     /**
diff --git a/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSource.java b/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSource.java
index 94eabe2e..04b1228b 100644
--- a/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSource.java
+++ b/src/main/java/io/pravega/connectors/flink/FlinkPravegaTableSource.java
@@ -70,7 +70,7 @@ protected FlinkPravegaTableSource(
             TableSchema schema) {
         this.sourceFunctionFactory = checkNotNull(sourceFunctionFactory, "sourceFunctionFactory");
         this.inputFormatFactory = checkNotNull(inputFormatFactory, "inputFormatFactory");
-        this.schema = TableSchemaUtils.checkNoGeneratedColumns(schema);
+        this.schema = TableSchemaUtils.checkOnlyPhysicalColumns(schema);
     }
 
     /**
diff --git a/src/test/java/io/pravega/connectors/flink/FlinkPravegaReaderRGStateITCase.java b/src/test/java/io/pravega/connectors/flink/FlinkPravegaReaderRGStateITCase.java
index 6110ae77..3a6db202 100644
--- a/src/test/java/io/pravega/connectors/flink/FlinkPravegaReaderRGStateITCase.java
+++ b/src/test/java/io/pravega/connectors/flink/FlinkPravegaReaderRGStateITCase.java
@@ -35,6 +35,7 @@
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
@@ -74,6 +75,7 @@ public static void tearDownPravega() throws Exception {
         SETUP_UTILS.stopAllServices();
     }
 
+    @Ignore("Test failed due to https://issues.apache.org/jira/browse/FLINK-21178")
     @Test
     public void testReaderState() throws Exception {
 
diff --git a/src/test/java/io/pravega/connectors/flink/FlinkPravegaTableSinkTest.java b/src/test/java/io/pravega/connectors/flink/FlinkPravegaTableSinkTest.java
index ecfb7d07..debb697b 100644
--- a/src/test/java/io/pravega/connectors/flink/FlinkPravegaTableSinkTest.java
+++ b/src/test/java/io/pravega/connectors/flink/FlinkPravegaTableSinkTest.java
@@ -34,7 +34,8 @@
 import org.junit.Test;
 
 import java.net.URI;
-import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 
 import static org.junit.Assert.assertEquals;
@@ -169,8 +170,13 @@ public TransformationMock(String name, TypeInformation<?> outputType, int para
         }
 
         @Override
-        public Collection<Transformation<?>> getTransitivePredecessors() {
+        public List<Transformation<?>> getTransitivePredecessors() {
             return null;
         }
+
+        @Override
+        public List<Transformation<?>> getInputs() {
+            return Collections.emptyList();
+        }
     }
 }
\ No newline at end of file
diff --git a/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableFactoryTest.java b/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableFactoryTest.java
index 
06b2fe53..60894a88 100644 --- a/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableFactoryTest.java +++ b/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableFactoryTest.java @@ -61,7 +61,7 @@ import java.util.Optional; import java.util.function.Consumer; -import static org.apache.flink.util.CoreMatchers.containsCause; +import static org.apache.flink.core.testutils.FlinkMatchers.containsCause; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -124,7 +124,8 @@ public void testStreamingTableSource() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); // Test scan source equals final FlinkPravegaDynamicTableSource expectedPravegaSource = new FlinkPravegaDynamicTableSource( @@ -192,7 +193,8 @@ public void testBatchTableSource() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); // Test scan source equals final FlinkPravegaDynamicTableSource expectedPravegaSource = new FlinkPravegaDynamicTableSource( @@ -259,7 +261,8 @@ public void testTableSink() { objectIdentifier, sinkTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); final FlinkPravegaDynamicTableSink expectedSink = new FlinkPravegaDynamicTableSink( TableSchemaUtils.getPhysicalSchema(SINK_SCHEMA), @@ -291,7 +294,8 @@ public void testTableSinkProvider() { objectIdentifier, sinkTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); final FlinkPravegaDynamicTableSink sink = new FlinkPravegaDynamicTableSink( TableSchemaUtils.getPhysicalSchema(SINK_SCHEMA), @@ -336,7 +340,8 @@ public void testInvalidScanExecutionType() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -359,7 +364,8 @@ public void testMissingReaderGroupName() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -383,7 +389,8 @@ public void testNegativeMaxCheckpointRequest() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -406,7 +413,8 @@ public void testMissingSourceStream() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -431,7 +439,8 @@ public void testInvalidStartStreamCuts() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -456,7 +465,8 @@ public void testInvalidEndStreamCuts() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } @Test @@ -480,7 +490,8 @@ public void testInvalidSinkSemantic() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + 
false); } @Test @@ -503,7 +514,8 @@ public void testMissingSinkStream() { objectIdentifier, catalogTable, new Configuration(), - Thread.currentThread().getContextClassLoader()); + Thread.currentThread().getContextClassLoader(), + false); } // -------------------------------------------------------------------------------------------- diff --git a/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableITCase.java b/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableITCase.java index 5b37233d..9ea2c640 100644 --- a/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableITCase.java +++ b/src/test/java/io/pravega/connectors/flink/dynamic/table/FlinkPravegaDynamicTableITCase.java @@ -121,11 +121,7 @@ public void testPravegaSourceSink() throws Exception { " AS orders (price, currency, d, t, ts)"; // Write stream, Block until data is ready or job finished - tEnv.executeSql(initialValues) - .getJobClient() - .get() - .getJobExecutionResult(Thread.currentThread().getContextClassLoader()) - .get(); + tEnv.executeSql(initialValues).await(); // ---------- Read stream from Pravega ------------------- From 10c96ccf834f30495f153b2eac9d5534ca96b2db Mon Sep 17 00:00:00 2001 From: fyang86 <48351067+fyang86@users.noreply.github.com> Date: Thu, 18 Mar 2021 16:11:20 +0800 Subject: [PATCH 3/5] [issue-467] Improve the readme doc with compatibility matrix (#473) Signed-off-by: Fan, Yang --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f5d7d0c4..588bce72 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,19 @@ The connectors can be used to build end-to-end stream processing pipelines (see - Table API support to access Pravega Streams for both **Batch** and **Streaming** use case. +## Compatibility Matrix +The [master](https://github.com/pravega/flink-connectors) branch will always have the most recent +supported versions of Flink and Pravega. 
+
+| Git Branch | Pravega Version | Java Version To Build Connector | Java Version To Run Connector | Flink Version | Status | Artifact Link |
+|------------|-----------------|---------------------------------|-------------------------------|---------------|--------|---------------|
+| [master](https://github.com/pravega/flink-connectors) | 0.10 | Java 11 | Java 8 or 11 | 1.12 | Under Development | http://oss.jfrog.org/jfrog-dependencies/io/pravega/pravega-connectors-flink-1.12_2.12/ |
+| [r0.10-flink1.11](https://github.com/pravega/flink-connectors/tree/r0.10-flink1.11) | 0.10 | Java 11 | Java 8 or 11 | 1.11 | Under Development | http://oss.jfrog.org/jfrog-dependencies/io/pravega/pravega-connectors-flink-1.11_2.12/ |
+| [r0.10-flink1.10](https://github.com/pravega/flink-connectors/tree/r0.10-flink1.10) | 0.10 | Java 11 | Java 8 or 11 | 1.10 | Under Development | http://oss.jfrog.org/jfrog-dependencies/io/pravega/pravega-connectors-flink-1.10_2.12/ |
+| [r0.9](https://github.com/pravega/flink-connectors/tree/r0.9) | 0.9 | Java 11 | Java 8 or 11 | 1.11 | Released | https://repo1.maven.org/maven2/io/pravega/pravega-connectors-flink-1.11_2.12/0.9.0/ |
+| [r0.9-flink1.10](https://github.com/pravega/flink-connectors/tree/r0.9-flink1.10) | 0.9 | Java 11 | Java 8 or 11 | 1.10 | Released | https://repo1.maven.org/maven2/io/pravega/pravega-connectors-flink-1.10_2.12/0.9.0/ |
+| [r0.9-flink1.9](https://github.com/pravega/flink-connectors/tree/r0.9-flink1.9) | 0.9 | Java 11 | Java 8 or 11 | 1.9 | Released | https://repo1.maven.org/maven2/io/pravega/pravega-connectors-flink-1.9_2.12/0.9.0/ |
+
 ## Documentation
 To learn more about how to build and use the Flink Connector library, follow the connector documentation [here](http://pravega.io/).
 
@@ -33,4 +46,3 @@ More examples on how to use the connectors with Flink application can be found i
 
 Flink connectors for Pravega is 100% open source and community-driven. All components are available under
 [Apache 2 License](https://www.apache.org/licenses/LICENSE-2.0.html) on GitHub.
-

From da90030a7426945bb9d74c8de9020d6a937b9f82 Mon Sep 17 00:00:00 2001
From: fyang86 <48351067+fyang86@users.noreply.github.com>
Date: Fri, 19 Mar 2021 22:19:55 +0800
Subject: [PATCH 4/5] [issue-472] Add a dev guide document (#469)

Signed-off-by: Fan, Yang
---
 documentation/src/docs/dev-guide.md | 182 ++++++++++++++++++++++
 1 file changed, 182 insertions(+)
 create mode 100644 documentation/src/docs/dev-guide.md

diff --git a/documentation/src/docs/dev-guide.md b/documentation/src/docs/dev-guide.md
new file mode 100644
index 00000000..ba4e84bc
--- /dev/null
+++ b/documentation/src/docs/dev-guide.md
@@ -0,0 +1,182 @@
+
+# Flink Connector - Dev Guide
+Learn how to build your own applications that use the Flink connector for Pravega.
+
+
+# Prerequisites
+To complete this guide, you need:
+
+* JDK 8 or 11 installed with JAVA_HOME configured appropriately
+* Pravega running (check [here](https://pravega.io/docs/latest/getting-started/) to get started with Pravega)
+* Gradle or Maven installed
+
+
+
+# Goal
+In this guide, we will create a straightforward example application that writes data collected from an external network stream into a Pravega Stream and reads the data back from the Pravega Stream.
+We recommend that you follow the instructions from [Bootstrapping project](#Bootstrapping-the-Project) onwards to create the application step by step.
+However, you can go straight to the completed example at [flink-connector-examples](https://github.com/pravega/pravega-samples/tree/master/flink-connector-examples).
+
+
+
+
+# Starting Flink
+Download a Flink release and un-tar it. We use Flink 1.11.2 here.
+```
+$ tar -xzf flink-1.11.2-bin-scala_2.11.tgz
+$ cd flink-1.11.2-bin-scala_2.11
+```
+Start a cluster:
+```
+$ ./bin/start-cluster.sh
+Starting cluster.
+Starting standalonesession daemon on host.
+Starting taskexecutor daemon on host.
+```
+When you are finished, you can quickly stop the cluster and all running components:
+```
+$ ./bin/stop-cluster.sh
+```
+
+# Bootstrapping the Project
+
+Use Gradle or Maven to bootstrap a sample application against Pravega. Let's create a word count application as an example.
+### Gradle
+You can follow the instructions [here](https://ci.apache.org/projects/flink/flink-docs-stable/dev/project-configuration.html#gradle) to create a Gradle project.
+
+Add the below snippet to the dependencies section of `build.gradle` in the app directory; connector dependencies should be part of the shadow jar. For the Flink connector dependency, we need to choose the connector that matches the Flink major version (and the Scala version, if you use Scala), along with the same Pravega version you run.
+```
+compile group: 'org.apache.flink', name: 'flink-streaming-java_2.12', version: '1.11.2'
+
+flinkShadowJar group: 'io.pravega', name: 'pravega-connectors-flink-1.11_2.12', version: '0.9.0'
+```
+Define the custom configuration `flinkShadowJar`:
+```
+// -> Explicitly define the libraries we want to be included in the "flinkShadowJar" configuration!
+configurations {
+    flinkShadowJar // dependencies which go into the shadowJar
+
+    // always exclude these (also from transitive dependencies) since they are provided by Flink
+    flinkShadowJar.exclude group: 'org.apache.flink', module: 'force-shading'
+    flinkShadowJar.exclude group: 'com.google.code.findbugs', module: 'jsr305'
+    flinkShadowJar.exclude group: 'org.slf4j'
+    flinkShadowJar.exclude group: 'org.apache.logging.log4j'
+}
+```
+
+Invoke `gradle clean shadowJar` to build/package the project. You will find a JAR file that contains your application, plus connectors and libraries that you may have added as dependencies to the application: `build/libs/<project-name>-<version>-all.jar`.
+
+
+### Maven
+
+You can check the [maven-quickstart](https://ci.apache.org/projects/flink/flink-docs-release-1.12/dev/project-configuration.html#maven-quickstart) to see how to start with Maven.
+
+Add the below dependencies into the Maven POM; these dependencies should be part of the shadow jar:
+```
+<dependency>
+    <groupId>org.apache.flink</groupId>
+    <artifactId>flink-streaming-java_2.12</artifactId>
+    <version>1.11.2</version>
+    <scope>provided</scope>
+</dependency>

+<dependency>
+    <groupId>io.pravega</groupId>
+    <artifactId>pravega-connectors-flink-1.11_2.12</artifactId>
+    <version>0.9.0</version>
+</dependency>
+```
+
+Invoke `mvn clean package` to build/package your project. You will find a JAR file that contains your application, plus connectors and libraries that you may have added as dependencies to the application: `target/<artifact-id>-<version>.jar`.
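+
+Note that `mvn package` only bundles the non-`provided` dependencies into the application JAR if the shade plugin is configured. A minimal sketch of that configuration is shown below (illustrative only; the Flink Maven quickstart generates a more complete version, including a main-class transformer):
+```
+<build>
+    <plugins>
+        <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-shade-plugin</artifactId>
+            <version>3.2.4</version>
+            <executions>
+                <execution>
+                    <!-- run the shade goal during the package phase -->
+                    <phase>package</phase>
+                    <goals>
+                        <goal>shade</goal>
+                    </goals>
+                </execution>
+            </executions>
+        </plugin>
+    </plugins>
+</build>
+```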
+
+
+
+## Create an application that writes to Pravega
+
+Let’s first create a Pravega configuration from the program arguments:
+```java
+ParameterTool params = ParameterTool.fromArgs(args);
+PravegaConfig pravegaConfig = PravegaConfig
+        .fromParams(params)
+        .withDefaultScope("my_scope");
+```
+Then we need to initialize the Flink execution environment:
+```java
+final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+```
+Create a datastream that gets its input data by connecting to the socket:
+```java
+DataStream<String> dataStream = env.socketTextStream(host, port);
+```
+A Pravega Stream may be used as a data sink within a Flink program using an instance of `io.pravega.connectors.flink.FlinkPravegaWriter`. We add an instance of the writer to the dataflow program:
+```java
+FlinkPravegaWriter<String> writer = FlinkPravegaWriter.<String>builder()
+        .withPravegaConfig(pravegaConfig)
+        .forStream(stream)
+        .withSerializationSchema(new SimpleStringSchema())
+        .build();
+dataStream.addSink(writer).name("Pravega Sink");
+```
+Then we execute the job within the Flink environment:
+```java
+env.execute("PravegaWriter");
+```
+Executing the above lines creates and runs the PravegaWriter job.
+
+## Create an application that reads from Pravega
+Creating a Pravega reader is similar to creating a writer.
+First create a Pravega configuration from the program arguments:
+```java
+ParameterTool params = ParameterTool.fromArgs(args);
+PravegaConfig pravegaConfig = PravegaConfig
+        .fromParams(params)
+        .withDefaultScope("my_scope");
+```
+Initialize the Flink execution environment:
+```java
+final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+```
+A Pravega Stream may be used as a data source within a Flink streaming program using an instance of `io.pravega.connectors.flink.FlinkPravegaReader`. The reader reads a given Pravega Stream (or multiple streams) as a DataStream:
+```java
+FlinkPravegaReader<String> source = FlinkPravegaReader.<String>builder()
+        .withPravegaConfig(pravegaConfig)
+        .forStream(stream)
+        .withDeserializationSchema(new SimpleStringSchema())
+        .build();
+```
+Then create a datastream that counts each word over a 10-second time window:
+```java
+DataStream<WordCount> dataStream = env.addSource(source).name("Pravega Stream")
+        .flatMap(new Tokenizer()) // The Tokenizer() splits the line into words, and emits streams of "WordCount(word, 1)"
+        .keyBy("word")
+        .timeWindow(Time.seconds(10))
+        .sum("count");
+```
+Create an output sink to print to stdout for verification:
+```java
+dataStream.print();
+```
+Then we execute the job within the Flink environment:
+```java
+env.execute("PravegaReader");
+```
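+
+For reference, a minimal `Tokenizer` and `WordCount` for the example above could look like the following sketch. These are illustrative helpers declared inside the application class, not part of the connector API; the public fields and no-arg constructor are what let the field expressions `keyBy("word")` and `sum("count")` work:
+```java
+import org.apache.flink.api.common.functions.FlatMapFunction;
+import org.apache.flink.util.Collector;
+
+// Splits each line into lowercase words and emits a WordCount(word, 1) per word.
+public static final class Tokenizer implements FlatMapFunction<String, WordCount> {
+    @Override
+    public void flatMap(String line, Collector<WordCount> out) {
+        for (String word : line.toLowerCase().split("\\W+")) {
+            if (!word.isEmpty()) {
+                out.collect(new WordCount(word, 1));
+            }
+        }
+    }
+}
+
+// Simple POJO holding a word and its running count.
+public static class WordCount {
+    public String word;
+    public int count;
+
+    public WordCount() { }
+
+    public WordCount(String word, int count) {
+        this.word = word;
+        this.count = count;
+    }
+}
+```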
+
+## Run in the Flink environment
+First build your application. From Flink's perspective, the connector to Pravega is part of the streaming application (not part of Flink's core runtime), so the connector code must be part of the application's code artifact (JAR file). Typically, a Flink application is bundled as a `fat-jar` (also known as an `uber-jar`), such that all its dependencies are embedded.
+
+Make sure your Pravega and Flink are running. Use the packaged jar, and run:
+```
+flink run -c <classname> ${your-app}.jar --controller <pravega-controller-uri>
+```
+
+# What’s next?
+This guide covered the creation of an application that uses the Flink connector to read and write from a Pravega stream. However, there is much more. We recommend continuing the journey by going through [flink connector documents](https://pravega.io/docs/latest/connectors/flink-connector/) and checking other examples on [flink-connector-examples](https://github.com/pravega/pravega-samples/tree/master/flink-connector-examples).

From 2cbdbacbc6af3efc8b318b039947953b6c8cbf6f Mon Sep 17 00:00:00 2001
From: thekingofcity
Date: Wed, 24 Mar 2021 09:44:16 +0800
Subject: [PATCH 5/5] [Issue-445] Drop travis build and use github actions for
 CI pipeline (#474)

Signed-off-by: zhongle.wang
---
 .github/workflows/build-artifacts.yml | 62 +++++++++++++++++++++++++++
 .travis.yml                           | 61 --------------------------
 2 files changed, 62 insertions(+), 61 deletions(-)
 create mode 100644 .github/workflows/build-artifacts.yml
 delete mode 100644 .travis.yml

diff --git a/.github/workflows/build-artifacts.yml b/.github/workflows/build-artifacts.yml
new file mode 100644
index 00000000..30b210dc
--- /dev/null
+++ b/.github/workflows/build-artifacts.yml
@@ -0,0 +1,62 @@
+name: build-artifacts
+
+on: [push, pull_request, workflow_dispatch]
+# workflow_dispatch should make manually triggered ci/cd possible
+# workflow file (like this) with `workflow_dispatch` after on should exist on the **master** or default branch,
+# or there will be no ui for a manual trigger. https://github.community/t/workflow-dispatch-event-not-working/128856/2
+
+env:
+  GRADLE_OPTS: "-Xms128m -Xmx1024m"
+  ORG_GRADLE_PROJECT_logOutput: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Set up JDK 11
+        uses: actions/setup-java@v1
+        with:
+          java-version: '11' # major or semver Java version will be acceptable, see https://github.com/marketplace/actions/setup-java-jdk#basic
+          java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
+          architecture: x64 # (x64 or x86) - defaults to x64
+
+      - name: Cache gradle modules
+        uses: actions/cache@v2
+        with:
+          # gradle packages need to be cached
+          path: |
+            .gradle
+            $HOME/.gradle
+            $HOME/.m2
+          # key to identify the specific cache
+          # so if we upgrade some modules, the key(hash) of `gradle.properties` will change
+          # and we will rerun the download to get the newest packages
+          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+
+      - name: Grant execute permission for gradlew
+        run: chmod +x gradlew
+
+      - name: Build via Gradle
+        run: ./gradlew clean build
+
+      - name: Build via Gradle with Scala 2.11
+        run: |
+          ./gradlew clean build -PflinkScalaVersion=2.11
+          bash <(curl -s https://codecov.io/bash) -t 9c42ff48-d98f-4444-af05-cf734aa1dbd0
+
+  snapshot:
+    needs: [build]
+    runs-on: ubuntu-latest
+    # only publish the snapshot when it is a push on the master or the release branch (starts with r0.x or r1.x)
+    if: ${{ github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/r0.') || startsWith(github.ref, 'refs/heads/r1.')) }}
+    env:
+      BINTRAY_USER: ${{ secrets.BINTRAY_USER }}
+      BINTRAY_KEY: ${{ secrets.BINTRAY_KEY }}
+    steps:
+      - name: Publish to repo
+        run: ./gradlew publishToRepo -PpublishUrl=jcenterSnapshot -PpublishUsername=$BINTRAY_USER -PpublishPassword=$BINTRAY_KEY
+
+      - name: Publish to repo with Scala 2.11
+        run: ./gradlew publishToRepo -PpublishUrl=jcenterSnapshot -PpublishUsername=$BINTRAY_USER -PpublishPassword=$BINTRAY_KEY -PflinkScalaVersion=2.11
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index b4296bc0..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Copyright (c) 2017 Dell Inc., or 
its subsidiaries. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -language: java -install: true -jdk: - - openjdk11 - -# the secure configurations in env: section is for BINTRAY_USER= and BINTRAY_KEY= properties -# which will be used for publishing artifacts to snapshot repository -env: - global: - - GRADLE_OPTS="-Xms128m -Xmx1024m" - - ORG_GRADLE_PROJECT_logOutput=true - - secure: "EPL076YzDvzRIThkh5pbpgd/59vONUsncWg7uESuY/7P7ZVnksUFoE20/vQNo40OTgukkjgnTymb0uJ9jWFpQOnNw+LzdvhC4BtPSAg4SlEuIsOhj6qPdvYe8XhGZj0Z0yGcP7dwu18pgaNSTJ1iVRjPXlysgYOwl0JF/XcL7yzi486JTmmaFlsKUz9sikO00JUwml32DmyUCMsdgtfzKx4CvSepsFjgDlACErul+/kKButS74a9SzaBfLwxI1TFjni9JEMZGcK9pNP6Mi6//1kZcG2W0GWkmAqPo73VvBSaSR/zso/5mJNSsBv7f4eOwIh/Nkv74g4xmVds6wHxCQC77SedGpvVcXnmGVdXHL17sCOenuc1R70K2Qq00hMaDz/2kG497Estd96kpVUav680p8N8WPjEbHqfDY3k+/8MmBce8NWkPGUy4XdF4eJxLUJ8GEXRzR2v9JDY9A2gcNmSC6rwf0VedbopQ/fE5k8CKgvt9eZQM2nJE5ZzObakVssvYNNIexK0NprH0hQMLt68sLBAN7H8M57wbZSUEZD3e52SesK8h0xZIYty1leaUx/G/yo1wxOGNC4fGqyjPYNf04zDv/gnK11eeJBOu+t9T07PVn7XWBg0CdoX4XY+1Rlp8yWvQINXWwYSMhUu+xU1KyCp67qTD7MadZ/OjLE=" - - secure: "CuLbIV4oqk9W1mNdy9Dz/YmOW/638ufJf+B83lxCky6l+Us6p5sGAyv+GZUWtYKpExFah3+lC6Fp8DOlQOsSpNz50bSEFzOWRKDyLui2btS06wXjPhp5M+dBZgPolTrrxb6BM7gGcA0sPkpFbDufblF7E/wmMiBUePsCnHiP3mGlR+Zxk/i44R3Ooy4ScoPGH8kB6i5S0YEF26WV+qDOv3/qRuMAQpCqWhovIvdNps/aZwvYYYvYLrx3sj/+menay/D23i1W2r4w4hdF2oLekkT+qY22dajTKkE+RpoIKDC1YYIz22jnSUN68DRQipsXD/S3IfK2HacvveopdDytHHy+RfYGvuToMnj7rwXf3ycSTLKYK0/kN4HVsNl4Nq1b9CuxZP5YETU51o0bX1BRw8H5cmGhrzZfmcouYEc7KMl51S+3ksP3fuM5ZH2iej3LVVk5sT19sDwo/xcbXZg7Pv2sCMJdnf9YSfhpXIiqC3ZNIA5sWJLQUJtccUCvlu76/6K8y+34Gz4YJXGYZHNQl+0g+b17dnhgJEG2hDV0cFlu7TJqVwFRijbRpRMitQng5+/cBYCiKPYxfg/iaqFxNfJoJXPe2QaYxkt72FzybtCQvNjlFAzf/KRs41jU9o+ZFBaD4X4jmdPRUouwtGP6k7QxnRcnrhiUHncbewcnhE8=" - -sudo: required - -services: - - docker - -cache: - directories: - - .gradle - - $HOME/.gradle - - $HOME/.m2 - -stages: - - name: build - - name: snapshot - # master or release branch - if: (branch = master OR branch =~ /^r[0-9]\.[0-9][\S]*/) AND NOT (type = pull_request) - -jobs: - include: - - stage: build - script: ./gradlew clean build - - script: ./gradlew clean build -PflinkScalaVersion=2.11 - after_success: bash <(curl -s https://codecov.io/bash) -t 9c42ff48-d98f-4444-af05-cf734aa1dbd0 - - - stage: snapshot - script: ./gradlew publishToRepo -PpublishUrl=jcenterSnapshot -PpublishUsername=$BINTRAY_USER -PpublishPassword=$BINTRAY_KEY - - script: ./gradlew publishToRepo -PpublishUrl=jcenterSnapshot -PpublishUsername=$BINTRAY_USER -PpublishPassword=$BINTRAY_KEY -PflinkScalaVersion=2.11 - -notifications: - slack: - matrix: - secure: Gv0RJx1Sa/y5fmvLNwY+2ivfWZYCM0ekrr6UAHqsegnid6P/DFZrSrfSpwvcVh2OVNH8DHLV0BoiuDJ7amtl1eMDMXz5/lLz8tFWFKaHv4yDSadm8ILY/KnYUoP4IRuM3NyKQmBrmZB9Or5KFXboG6ex6UkgbuYy0Zyl6syEe168Iw8hlCRx26Jei7/y+8eE2MIGFh09TLRZ/944YbULum9H3KQLYv8nFdPc7GmR5AK461fnwZ7iYjb7MXkCctE5Vml3p9+2Qliv1ZJqNsQeKmSFW6IhiP6pNZ1V8VJEWMQmX/nBr9745l/N+CoLQz9ajLonlxn9xHdWms4TEu1ynFk6uxEJjlcpXcvcEaKhqAKcTMl0GMMRab2m+/Vt3S/VutJnVXQmnhZGT9glLFQHwcdHNqM/LEbXtyisB7zmGImUQpF2InCwO25IXug5gv64IfOHGMzL56yNIhbRgBY9Ud4Tux+pmkV5ZxJiBkul7/FiHQX7tQLUrzQosD0oyCOmaWD7kmbt15A0TOkLgup4HE+sSS1ASwisa7J2+HsbI3Upy3rNVKuIJP0L4KSTn4HSlDlMLLcWM+nz/YCEfuwSRXJTIstotNYHdsLUZAZSYAX7ejpeiuBRed4a4AlCROeKbKKwCcSvqCOjmCaPTpwJAGeJByOXLL2hfQzpDMKCIKM= - rooms: - secure: 
Gv0RJx1Sa/y5fmvLNwY+2ivfWZYCM0ekrr6UAHqsegnid6P/DFZrSrfSpwvcVh2OVNH8DHLV0BoiuDJ7amtl1eMDMXz5/lLz8tFWFKaHv4yDSadm8ILY/KnYUoP4IRuM3NyKQmBrmZB9Or5KFXboG6ex6UkgbuYy0Zyl6syEe168Iw8hlCRx26Jei7/y+8eE2MIGFh09TLRZ/944YbULum9H3KQLYv8nFdPc7GmR5AK461fnwZ7iYjb7MXkCctE5Vml3p9+2Qliv1ZJqNsQeKmSFW6IhiP6pNZ1V8VJEWMQmX/nBr9745l/N+CoLQz9ajLonlxn9xHdWms4TEu1ynFk6uxEJjlcpXcvcEaKhqAKcTMl0GMMRab2m+/Vt3S/VutJnVXQmnhZGT9glLFQHwcdHNqM/LEbXtyisB7zmGImUQpF2InCwO25IXug5gv64IfOHGMzL56yNIhbRgBY9Ud4Tux+pmkV5ZxJiBkul7/FiHQX7tQLUrzQosD0oyCOmaWD7kmbt15A0TOkLgup4HE+sSS1ASwisa7J2+HsbI3Upy3rNVKuIJP0L4KSTn4HSlDlMLLcWM+nz/YCEfuwSRXJTIstotNYHdsLUZAZSYAX7ejpeiuBRed4a4AlCROeKbKKwCcSvqCOjmCaPTpwJAGeJByOXLL2hfQzpDMKCIKM= - email: - - tom.kaitchuck@dell.com - - brian.zhou@emc.com - - fpj@pravega.io