Skip to content

Commit

Permalink
Merge pull request #5 from data-integrations/feature/merge-release-to-develop-and-snapshot
Browse files Browse the repository at this point in the history

Feature/merge release to develop and snapshot
  • Loading branch information
anew authored Apr 17, 2019
2 parents b4b9016 + 535e79f commit 294a474
Show file tree
Hide file tree
Showing 13 changed files with 87 additions and 144 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ CDAP Slack Channel: http://cdap-users.herokuapp.com/

## License and Trademarks

Copyright © 2017 Cask Data, Inc.
Copyright © 2017-2019 Cask Data, Inc.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
Expand Down
Binary file added icons/CobolRecordConverter-transform.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
135 changes: 39 additions & 96 deletions pom.xml
Original file line number Diff line number Diff line change
@@ -1,46 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright © 2017-2019 Cask Data, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>co.cask</groupId>
<groupId>io.cdap.plugin</groupId>
<artifactId>cobol-to-avro-transform</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<version>1.2.0-SNAPSHOT</version>
<name>Cobol to Avro Transform</name>

<properties>
<avro.version>1.7.7</avro.version>
<cdap.version>4.1.0</cdap.version>
<cdap.version>6.0.0-SNAPSHOT</cdap.version>
<guava.version>18.0</guava.version>
<janino.version>3.0.7</janino.version>
<legstar.avro.version>0.4.2</legstar.avro.version>
<logback.version>1.2.3</logback.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

<!-- properties for script build step that creates the config files for the artifacts -->
<widgets.dir>widgets</widgets.dir>
<docs.dir>docs</docs.dir>

<!-- This is the version range for the app.parents that this plugin is valid for. Usually this will correspond with
the CDAP version, but not always. -->
<etl.versionRange>[3.3.0,10.0.0-SNAPSHOT)</etl.versionRange>

<!-- These are the application template artifacts that this plugin will be available for. -->
<app.parents>
system:cdap-etl-batch,
system:cdap-data-pipeline,
system:cdap-data-streams
</app.parents>

<!-- this is here because project.basedir evaluates to null in the script build step -->
<main.basedir>${project.basedir}</main.basedir>
<data.stream.parent>system:cdap-data-streams[6.0.0-SNAPSHOT,7.0.0-SNAPSHOT)</data.stream.parent>
<data.pipeline.parent>system:cdap-data-pipeline[6.0.0-SNAPSHOT,7.0.0-SNAPSHOT)</data.pipeline.parent>
</properties>

<dependencies>
<dependency>
<groupId>co.cask.cdap</groupId>
<groupId>io.cdap.cdap</groupId>
<artifactId>cdap-api</artifactId>
<version>${cdap.version}</version>
</dependency>
<dependency>
<groupId>co.cask.cdap</groupId>
<groupId>io.cdap.cdap</groupId>
<artifactId>cdap-etl-api</artifactId>
<version>${cdap.version}</version>
</dependency>
Expand Down Expand Up @@ -89,7 +92,7 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>co.cask.cdap</groupId>
<groupId>io.cdap.cdap</groupId>
<artifactId>cdap-formats</artifactId>
<version>${cdap.version}</version>
</dependency>
Expand All @@ -98,81 +101,21 @@
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.7</version>
<groupId>io.cdap</groupId>
<artifactId>cdap-maven-plugin</artifactId>
<version>1.1.0</version>
<configuration>
<cdapArtifacts>
<parent>${data.pipeline.parent}</parent>
<parent>${data.stream.parent}</parent>
</cdapArtifacts>
</configuration>
<executions>
<execution>
<id>create-artifact-config</id>
<phase>prepare-package</phase>
<configuration>
<target>
<script language="javascript"><![CDATA[
// for some reason, project.basedir evaluates to null if we just get the property here.
// so we set main.basedir to project.basedir in the pom properties, then main.basedir is used here
// where it evaluates correctly for whatever reason
var baseDir = project.getProperty("main.basedir");
var targetDir = project.getProperty("project.build.directory");
var artifactId = project.getProperty("project.artifactId");
var version = project.getProperty("project.version");
var cfgFile = new java.io.File(targetDir, artifactId + "-" + version + ".json");
if (!cfgFile.exists()) {
cfgFile.createNewFile();
}
var parents = project.getProperty("app.parents").split(",");
var parentVersions = project.getProperty("etl.versionRange");
var config = {
"parents": [ ],
"properties": {}
}
for (i = 0; i < parents.length; i++) {
config.parents.push(parents[i].trim() + parentVersions);
}
// look in widgets directory for widget config for each plugin
var widgetsDir = new java.io.File(baseDir, project.getProperty("widgets.dir"));
if (widgetsDir.isDirectory()) {
var widgetsFiles = widgetsDir.listFiles();
for (i = 0; i < widgetsFiles.length; i++) {
var widgetsFile = widgetsFiles[i];
if (widgetsFile.isFile()) {
var propertyName = "widgets." + widgetsFile.getName();
// if the filename ends with .json
if (propertyName.indexOf(".json", propertyName.length - 5) !== -1) {
// strip the .json
propertyName = propertyName.slice(0, -5);
var contents = new java.lang.String(java.nio.file.Files.readAllBytes(widgetsFile.toPath()), java.nio.charset.StandardCharsets.UTF_8);
var contentsAsJson = JSON.parse(contents);
config.properties[propertyName] = JSON.stringify(contentsAsJson);
}
}
}
}
// look in the docs directory for docs for each plugin
var docsDir = new java.io.File(baseDir, project.getProperty("docs.dir"));
if (docsDir.isDirectory()) {
var docFiles = docsDir.listFiles();
for (i = 0; i < docFiles.length; i++) {
var docFile = docFiles[i];
if (docFile.isFile()) {
var propertyName = "doc." + docFile.getName();
// if the filename ends with .md
if (propertyName.indexOf(".md", propertyName.length - 3) !== -1) {
// strip the extension
propertyName = propertyName.slice(0, -3);
var contents = new java.lang.String(java.nio.file.Files.readAllBytes(docFile.toPath()), java.nio.charset.StandardCharsets.UTF_8);
config.properties[propertyName] = contents + "";
}
}
}
}
var fw = new java.io.BufferedWriter(new java.io.FileWriter(cfgFile.getAbsoluteFile()));
fw.write(JSON.stringify(config, null, 2));
fw.close();
]]></script>
</target>
</configuration>
<goals>
<goal>run</goal>
<goal>create-plugin-json</goal>
</goals>
</execution>
</executions>
Expand All @@ -194,7 +137,7 @@
<configuration>
<instructions>
<_exportcontents>
co.cask.*;
io.cdap.plugin.*;
com.google.common.*;
com.legstar.*
</_exportcontents>
Expand All @@ -214,4 +157,4 @@
</plugin>
</plugins>
</build>
</project>
</project>
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,25 +14,24 @@
* the License.
*/

package co.cask.plugin;

import co.cask.cdap.api.annotation.Description;
import co.cask.cdap.api.annotation.Macro;
import co.cask.cdap.api.annotation.Name;
import co.cask.cdap.api.annotation.Plugin;
import co.cask.cdap.api.data.format.StructuredRecord;
import co.cask.cdap.api.plugin.EndpointPluginContext;
import co.cask.cdap.api.plugin.PluginConfig;
import co.cask.cdap.etl.api.Emitter;
import co.cask.cdap.etl.api.Transform;
import co.cask.cdap.etl.api.TransformContext;
import co.cask.cdap.format.StructuredRecordStringConverter;
import co.cask.cobol.CopybookReader;
import co.cask.common.AvroConverter;
import co.cask.common.StreamByteSource;
import co.cask.common.StreamCharSource;
package io.cdap.plugin.cobol;

import com.legstar.avro.cob2avro.io.AbstractZosDatumReader;
import com.legstar.cob2xsd.Cob2XsdConfig;
import io.cdap.cdap.api.annotation.Description;
import io.cdap.cdap.api.annotation.Macro;
import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.api.plugin.EndpointPluginContext;
import io.cdap.cdap.api.plugin.PluginConfig;
import io.cdap.cdap.etl.api.Emitter;
import io.cdap.cdap.etl.api.Transform;
import io.cdap.cdap.etl.api.TransformContext;
import io.cdap.cdap.format.StructuredRecordStringConverter;
import io.cdap.plugin.common.AvroConverter;
import io.cdap.plugin.common.StreamByteSource;
import io.cdap.plugin.common.StreamCharSource;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.slf4j.Logger;
Expand Down Expand Up @@ -61,7 +60,7 @@ public CobolRecordConverter(Config config) {

private CopybookReader copybookReader;
private Schema avroSchema;
private co.cask.cdap.api.data.schema.Schema schema;
private io.cdap.cdap.api.data.schema.Schema schema;

@Override
public void initialize(TransformContext context) throws Exception {
Expand Down Expand Up @@ -107,7 +106,7 @@ private String getCodeFormat() {
* @throws IOException if there are any errors converting schema
*/
@javax.ws.rs.Path("outputSchema")
public co.cask.cdap.api.data.schema.Schema getSchema(GetSchemaRequest request,
public io.cdap.cdap.api.data.schema.Schema getSchema(GetSchemaRequest request,
EndpointPluginContext pluginContext) throws IOException {
Properties properties = new Properties();
properties.setProperty(Cob2XsdConfig.CODE_FORMAT, request.getCodeFormat());
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,7 +14,7 @@
* the License.
*/

package co.cask.cobol;
package io.cdap.plugin.cobol;

import com.github.jknack.handlebars.Handlebars;
import com.github.jknack.handlebars.Template;
Expand Down Expand Up @@ -75,13 +75,13 @@ public CopybookReader(CharSource copybookSource, Properties cobolConfig) throws

// Generate XML schema from the copybook
XmlSchema xmlSchema = new XmlSchemaCollection().read(
cob2xsd.emitXsd(cobolDataItems, "co.cask.cobol").getSchemaDocument());
cob2xsd.emitXsd(cobolDataItems, "io.cdap.plugin.cobol").getSchemaDocument());

// Convert XML schema to Avro schema
Schema avroSchema = translate(xmlSchema);

// Generate the CobolType classes ClassLoader
this.cobolTypeClassLoader = createCobolTypesClassLoader(xmlSchema, "co.cask.cobol");
this.cobolTypeClassLoader = createCobolTypesClassLoader(xmlSchema, "io.cdap.plugin.cobol");
this.avroSchema = avroSchema;
} catch (RecognizerException e) {
throw new IOException("Failed to parse cobol copybook: " + System.lineSeparator()
Expand Down Expand Up @@ -170,7 +170,7 @@ private List<CobolDataItem> parseCopybook(Cob2Xsd cob2xsd, Reader reader) throws
*/
private Schema translate(XmlSchema xmlSchema) throws Xsd2AvroTranslatorException {
Xsd2AvroTranslator avroTranslator = new Xsd2AvroTranslator();
return new Schema.Parser().parse(avroTranslator.translate(xmlSchema, "co.cask.cobol", "schema"));
return new Schema.Parser().parse(avroTranslator.translate(xmlSchema, "io.cdap.plugin.cobol", "schema"));
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,13 +14,13 @@
* the License.
*/

package co.cask.common;
package io.cdap.plugin.common;

import co.cask.cdap.api.data.format.StructuredRecord;
import co.cask.cdap.api.data.schema.Schema;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import io.cdap.cdap.api.data.format.StructuredRecord;
import io.cdap.cdap.api.data.schema.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericEnumSymbol;
import org.apache.avro.generic.GenericFixed;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,7 +14,7 @@
* the License.
*/

package co.cask.common;
package io.cdap.plugin.common;

import com.google.common.io.ByteSource;
import org.apache.twill.filesystem.Location;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,7 +14,7 @@
* the License.
*/

package co.cask.common;
package io.cdap.plugin.common;

import com.google.common.io.CharSource;
import org.apache.twill.filesystem.Location;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,7 +14,7 @@
* the License.
*/

package co.cask.common;
package io.cdap.plugin.common;

import com.google.common.io.ByteSource;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright © 2017 Cask Data, Inc.
* Copyright © 2017-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
Expand All @@ -14,7 +14,7 @@
* the License.
*/

package co.cask.common;
package io.cdap.plugin.common;

import com.google.common.io.CharSource;

Expand Down
Loading

0 comments on commit 294a474

Please sign in to comment.