From a1b7155c5f1968e2103c060cc8ba73cc9a1f7eac Mon Sep 17 00:00:00 2001 From: Yunhan Wang Date: Mon, 3 Apr 2017 14:58:19 -0700 Subject: [PATCH 1/4] SPOI-8252 #resolve #comment Create a simple file to JDBC application --- examples/fileToJdbc/.gitignore | 3 + examples/fileToJdbc/README.md | 70 +++++ .../XmlJavadocCommentsExtractor.xsl | 44 +++ examples/fileToJdbc/pom.xml | 293 ++++++++++++++++++ .../fileToJdbc/src/assemble/appPackage.xml | 43 +++ .../example/FileToJdbcApp/CustomParser.java | 95 ++++++ .../com/example/FileToJdbcApp/FileReader.java | 20 ++ .../FileToJdbcApp/FileToJdbcCsvParser.java | 54 ++++ .../FileToJdbcApp/FileToJdbcCustomParser.java | 50 +++ .../com/example/FileToJdbcApp/PojoEvent.java | 45 +++ .../main/resources/META-INF/properties.xml | 48 +++ .../fileToJdbc/src/main/resources/schema.json | 19 ++ .../FileToJdbcApp/ApplicationTest.java | 124 ++++++++ .../fileToJdbc/src/test/resources/example.sql | 8 + .../src/test/resources/log4j.properties | 21 ++ .../src/test/resources/test-input/sample.txt | 10 + .../fileToJdbc/src/test/resources/test.xml | 58 ++++ 17 files changed, 1005 insertions(+) create mode 100755 examples/fileToJdbc/.gitignore create mode 100755 examples/fileToJdbc/README.md create mode 100755 examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl create mode 100755 examples/fileToJdbc/pom.xml create mode 100755 examples/fileToJdbc/src/assemble/appPackage.xml create mode 100755 examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java create mode 100755 examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java create mode 100755 examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java create mode 100755 examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java create mode 100755 examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java create mode 100755 examples/fileToJdbc/src/main/resources/META-INF/properties.xml create mode 
100755 examples/fileToJdbc/src/main/resources/schema.json create mode 100755 examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java create mode 100644 examples/fileToJdbc/src/test/resources/example.sql create mode 100755 examples/fileToJdbc/src/test/resources/log4j.properties create mode 100644 examples/fileToJdbc/src/test/resources/test-input/sample.txt create mode 100755 examples/fileToJdbc/src/test/resources/test.xml diff --git a/examples/fileToJdbc/.gitignore b/examples/fileToJdbc/.gitignore new file mode 100755 index 0000000000..019edc2a0f --- /dev/null +++ b/examples/fileToJdbc/.gitignore @@ -0,0 +1,3 @@ +.DS_Store +/.idea/ +/target/ diff --git a/examples/fileToJdbc/README.md b/examples/fileToJdbc/README.md new file mode 100755 index 0000000000..ad9f6b9523 --- /dev/null +++ b/examples/fileToJdbc/README.md @@ -0,0 +1,70 @@ +## Sample File to JDBC Example + +This example shows how to read files from HDFS, parse into POJOs and then insert into a table in MySQL. + +Given various parsing demands, we give two applications under this package, `FileToJdbcCsvParser` and `FileToJdbcCustomParser`. + +`CsvParser` allows you to parse only CSV format input files. For more complex input format, `CustomParser` allows you to set custom regex to parse. + +Accordingly, we have two additional configuration files (`src/site/conf/exampleCsvParser.xml` and `src/site/conf/exampleCustomParser.xml`) besides the common properties file (`/src/main/resources/META-INF/properties.xml`). + +Users can choose which applicaiton and which addtional configuration file to use during launch time. 
+ + +####**Update Properties:** + +- Update these common properties in the file `/src/main/resources/META-INF/properties.xml`: + +| Property Name | Description | +| ------------- | ----------- | +| dt.operator.FileReader.prop.directory |HDFS input directory path +|dt.operator.JdbcOutput.prop.store.databaseUrl | database URL of the form `jdbc:mysql://hostName:portNumber/dbName` | +| dt.operator.JdbcOutput.prop.store.userName | MySQL user name | +| dt.operator.JdbcOutput.prop.store.password | MySQL user password | +| dt.operator.JdbcOutput.prop.tablename | MySQL output table name | + +- Using CustomParser: update `regexStr` in file `src/site/conf/exampleCustomParser.xml` + + +####**Sample Input:** + +- To set up MySQL database and create table, check `src/test/resources/example.sql` +- To run this example, create files using this format: + +``` + 1,User1,1000 + 2,User2,2000 + 3,User3,3000 + 4,User4,4000 + 5,User5,5000 + 6,User6,6000 + 7,User7,7000 + 8,User8,8000 + 9,User9,9000 + 10,User10,10000 +``` +- To change input format, update `PojoEvent` class and `addFieldInfos()` method in `src/main/java/com/example/FileToJdbcApp`. If using CsvParser, also update `src/main/resources/schema.json`. 
+ +####**Sample Output:** + +- After running successfully, verify +that the database table has the expected output: + +``` + mysql> select * from table_name; + +------------+--------+--------+ + | ACCOUNT_NO | NAME | AMOUNT | + +------------+--------+--------+ + | 1 | User1 | 1000 | + | 2 | User2 | 2000 | + | 3 | User3 | 3000 | + | 4 | User4 | 4000 | + | 5 | User5 | 5000 | + | 6 | User6 | 6000 | + | 7 | User7 | 7000 | + | 8 | User8 | 8000 | + | 9 | User9 | 9000 | + | 10 | User10 | 10000 | + +------------+--------+--------+ + 10 rows in set (0.00 sec) +``` diff --git a/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl b/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl new file mode 100755 index 0000000000..08075a98d3 --- /dev/null +++ b/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/examples/fileToJdbc/pom.xml b/examples/fileToJdbc/pom.xml new file mode 100755 index 0000000000..ae62e0cb83 --- /dev/null +++ b/examples/fileToJdbc/pom.xml @@ -0,0 +1,293 @@ + + + 4.0.0 + + com.example + 1.0-SNAPSHOT + FileToJdbcApp + jar + + + File to JDBC + My Apex Application Description + + + + 3.5.0 + lib/*.jar + 3.6.0 + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + 2.9 + + true + + + + maven-compiler-plugin + 3.3 + + UTF-8 + 1.7 + 1.7 + true + false + true + true + + + + maven-dependency-plugin + 2.8 + + + copy-dependencies + prepare-package + + copy-dependencies + + + target/deps + runtime + + + + + + + maven-assembly-plugin + + + app-package-assembly + package + + single + + + ${project.artifactId}-${project.version}-apexapp + false + + src/assemble/appPackage.xml + + + 0755 + + + + ${apex.apppackage.classpath} + ${apex.version} + ${project.groupId} + ${project.artifactId} + ${project.version} + ${project.name} + ${project.description} + + + + + + + + + maven-antrun-plugin + 1.7 + + + package + + + + + + + run + + + + + createJavadocDirectory + generate-resources + + + + + + + + run + + 
+ + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + attach-artifacts + package + + attach-artifact + + + + + target/${project.artifactId}-${project.version}.apa + apa + + + false + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + xml-doclet + generate-resources + + javadoc + + + com.github.markusbernhardt.xmldoclet.XmlDoclet + -d ${project.build.directory}/generated-resources/xml-javadoc -filename ${project.artifactId}-${project.version}-javadoc.xml + false + + com.github.markusbernhardt + xml-doclet + 1.0.4 + + + + + + + + org.codehaus.mojo + xml-maven-plugin + 1.0 + + + transform-xmljavadoc + generate-resources + + transform + + + + + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + XmlJavadocCommentsExtractor.xsl + ${project.build.directory}/generated-resources/xml-javadoc + + + + + + + maven-resources-plugin + 2.6 + + + copy-resources + process-resources + + copy-resources + + + ${basedir}/target/classes + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + true + + + + + + + + + + + + + + + org.apache.apex + malhar-library + ${malhar.version} + + + org.apache.apex + malhar-contrib + ${malhar.version} + + + org.apache.apex + apex-common + ${apex.version} + provided + + + junit + junit + 4.10 + test + + + org.apache.apex + apex-engine + ${apex.version} + test + + + mysql + mysql-connector-java + 5.1.36 + + + org.codehaus.janino + janino + 2.7.8 + + + org.codehaus.janino + commons-compiler + 2.7.8 + + + net.sf.supercsv + super-csv + 2.4.0 + + + org.hsqldb + hsqldb + 2.3.1 + + + + diff --git a/examples/fileToJdbc/src/assemble/appPackage.xml b/examples/fileToJdbc/src/assemble/appPackage.xml new file mode 100755 index 0000000000..7ad071c69f --- /dev/null +++ b/examples/fileToJdbc/src/assemble/appPackage.xml @@ -0,0 +1,43 @@ + + appPackage + + jar + + false + + + ${basedir}/target/ + 
/app + + ${project.artifactId}-${project.version}.jar + + + + ${basedir}/target/deps + /lib + + + ${basedir}/src/site/conf + /conf + + *.xml + + + + ${basedir}/src/main/resources/META-INF + /META-INF + + + ${basedir}/src/main/resources/app + /app + + + ${basedir}/src/main/resources/resources + /resources + + + + + diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java new file mode 100755 index 0000000000..e08925598e --- /dev/null +++ b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java @@ -0,0 +1,95 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.example.FileToJdbcApp; + +import com.datatorrent.api.Context.OperatorContext; +import com.datatorrent.api.DefaultInputPort; +import com.datatorrent.api.DefaultOutputPort; +import com.datatorrent.api.annotation.OutputPortFieldAnnotation; +import com.datatorrent.common.util.BaseOperator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.regex.Pattern; + +// parse input line into pojo event +public class CustomParser extends BaseOperator +{ + private static final Logger LOG = LoggerFactory.getLogger(CustomParser.class); + + // default regex pattern for parsing each line + private static final Pattern RegexDefault = Pattern.compile("[\\p{Punct}\\s]+"); + + private String regexStr; // customized configurable regex string + private transient Pattern regexPattern; // compiled regex pattern generated from customized regex string + + @OutputPortFieldAnnotation(optional = false) + public final transient DefaultOutputPort output = new DefaultOutputPort<>(); + + public final transient DefaultInputPort + input = new DefaultInputPort() { + + @Override + public void process(String line) + { + // use custom regex to split line into words + final String[] words = regexPattern.split(line); + + PojoEvent pojo = new PojoEvent(); + // transform words array into pojo event + try { + int accnum = Integer.parseInt(words[0]); + pojo.setAccountNumber(accnum); + } catch (NumberFormatException e) { + LOG.error("Number Format Exception", e); + pojo.setAccountNumber(0); + } + String name = words[1]; + pojo.setName(name); + try { + int amount = Integer.parseInt(words[2]); + pojo.setAmount(amount); + } catch (NumberFormatException e) { + LOG.error("Number Format Exception", e); + pojo.setAmount(0); + } + output.emit(pojo); + } + }; + + public String getRegexStr() { + return this.regexStr; + } + + public void setRegexStr(String regex) { + this.regexStr = regex; + } + + @Override + public void setup(OperatorContext context) + { + if (null == regexStr) 
{ + regexPattern = RegexDefault; + } else { + regexPattern = Pattern.compile(this.getRegexStr()); + } + } + +} + diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java new file mode 100755 index 0000000000..201c705ec5 --- /dev/null +++ b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java @@ -0,0 +1,20 @@ +package com.example.FileToJdbcApp; + +import com.datatorrent.api.DefaultOutputPort; +import org.apache.apex.malhar.lib.fs.LineByLineFileInputOperator; + +public class FileReader extends LineByLineFileInputOperator{ + + /** + * output in bytes to match CsvParser input type + */ + public final transient DefaultOutputPort byteOutput = new DefaultOutputPort<>(); + + @Override + protected void emit(String tuple) + { + output.emit(tuple); + byteOutput.emit(tuple.getBytes()); + } +} + diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java new file mode 100755 index 0000000000..23d3f36a11 --- /dev/null +++ b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java @@ -0,0 +1,54 @@ +package com.example.FileToJdbcApp; + +import com.datatorrent.api.DAG; +import com.datatorrent.api.StreamingApplication; +import com.datatorrent.api.annotation.ApplicationAnnotation; +import com.datatorrent.contrib.parser.CsvParser; +import com.datatorrent.lib.appdata.schemas.SchemaUtils; +import com.datatorrent.lib.db.jdbc.JdbcFieldInfo; +import com.datatorrent.lib.db.jdbc.JdbcPOJOInsertOutputOperator; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; +import com.google.common.collect.Lists; +import org.apache.hadoop.conf.Configuration; + +import java.util.List; + +import static java.sql.Types.INTEGER; +import static java.sql.Types.VARCHAR; + +@ApplicationAnnotation(name = "FileToJdbcCsvParser") 
+public class FileToJdbcCsvParser implements StreamingApplication{ + + @Override + public void populateDAG(DAG dag, Configuration configuration) { + // create operators + FileReader fileReader = dag.addOperator("FileReader", FileReader.class); + CsvParser csvParser = dag.addOperator("CsvParser", CsvParser.class); + JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class); + + // configure operators + String pojoSchema = SchemaUtils.jarResourceFileToString("schema.json"); + csvParser.setSchema(pojoSchema); + + jdbcOutputOperator.setFieldInfos(addFieldInfos()); + JdbcTransactionalStore outputStore = new JdbcTransactionalStore(); + jdbcOutputOperator.setStore(outputStore); + + // add stream + dag.addStream("Bytes", fileReader.byteOutput, csvParser.in); + dag.addStream("POJOs", csvParser.out, jdbcOutputOperator.input); + } + + /** + * This method can be modified to have field mappings based on used defined + * class + */ + private List addFieldInfos() { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER , INTEGER)); + fieldInfos.add(new JdbcFieldInfo("NAME", "name", JdbcFieldInfo.SupportType.STRING, VARCHAR)); + fieldInfos.add(new JdbcFieldInfo("AMOUNT", "amount", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); + return fieldInfos; + } +} + diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java new file mode 100755 index 0000000000..c13377f492 --- /dev/null +++ b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java @@ -0,0 +1,50 @@ +package com.example.FileToJdbcApp; + +import com.datatorrent.api.DAG; +import com.datatorrent.api.StreamingApplication; +import com.datatorrent.api.annotation.ApplicationAnnotation; +import 
com.datatorrent.lib.db.jdbc.JdbcFieldInfo; +import com.datatorrent.lib.db.jdbc.JdbcPOJOInsertOutputOperator; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; +import com.google.common.collect.Lists; +import org.apache.hadoop.conf.Configuration; + +import java.util.List; + +import static java.sql.Types.INTEGER; +import static java.sql.Types.VARCHAR; + +@ApplicationAnnotation(name = "FileToJdbcCustomParser") +public class FileToJdbcCustomParser implements StreamingApplication{ + + @Override + public void populateDAG(DAG dag, Configuration configuration) { + // create operators + FileReader fileReader = dag.addOperator("FileReader", FileReader.class); + CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class); + JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class); + + // configure operators + jdbcOutputOperator.setFieldInfos(addFieldInfos()); + JdbcTransactionalStore outputStore = new JdbcTransactionalStore(); + jdbcOutputOperator.setStore(outputStore); + + // add stream + dag.addStream("Data", fileReader.output, customParser.input); + dag.addStream("POJOs", customParser.output, jdbcOutputOperator.input); + } + + /** + * This method can be modified to have field mappings based on used defined + * class + */ + private List addFieldInfos() + { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER , INTEGER)); + fieldInfos.add(new JdbcFieldInfo("NAME", "name", JdbcFieldInfo.SupportType.STRING, VARCHAR)); + fieldInfos.add(new JdbcFieldInfo("AMOUNT", "amount", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); + return fieldInfos; + } +} + diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java new file mode 100755 index 0000000000..7985b45402 --- /dev/null +++ 
b/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java @@ -0,0 +1,45 @@ +package com.example.FileToJdbcApp; + +public class PojoEvent +{ + @Override + public String toString() + { + return "PojoEvent [accountNumber=" + accountNumber + ", name=" + name + ", amount=" + amount + "]"; + } + + private int accountNumber; + private String name; + private int amount; + + public int getAccountNumber() + { + return accountNumber; + } + + public void setAccountNumber(int accountNumber) + { + this.accountNumber = accountNumber; + } + + public String getName() + { + return name; + } + + public void setName(String name) + { + this.name = name; + } + + public int getAmount() + { + return amount; + } + + public void setAmount(int amount) + { + this.amount = amount; + } +} + diff --git a/examples/fileToJdbc/src/main/resources/META-INF/properties.xml b/examples/fileToJdbc/src/main/resources/META-INF/properties.xml new file mode 100755 index 0000000000..4f706c4d3d --- /dev/null +++ b/examples/fileToJdbc/src/main/resources/META-INF/properties.xml @@ -0,0 +1,48 @@ + + + + dt.operator.JdbcOutput.prop.store.databaseDriver + com.mysql.jdbc.Driver + + + + dt.operator.JdbcOutput.prop.store.databaseUrl + jdbc:mysql://hostName:portNumber/dbName + + + + dt.operator.JdbcOutput.prop.store.userName + root + + + + dt.operator.JdbcOutput.prop.store.password + password + + + + dt.operator.JdbcOutput.prop.batchSize + 5 + + + + dt.operator.JdbcOutput.prop.tablename + table_name + + + + dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS + com.example.FileToJdbcApp.PojoEvent + + + + dt.operator.FileReader.prop.directory + input_directory + + + + dt.loggers.level + com.datatorrent.*:INFO,org.apache.*:INFO + + + diff --git a/examples/fileToJdbc/src/main/resources/schema.json b/examples/fileToJdbc/src/main/resources/schema.json new file mode 100755 index 0000000000..3c191cf44e --- /dev/null +++ b/examples/fileToJdbc/src/main/resources/schema.json @@ -0,0 +1,19 @@ +{ + "separator": 
",", + "quoteChar":"\"", + "fields": [ + { + "name": "AccountNumber", + "type": "INTEGER" + }, + { + "name": "Name", + "type": "String" + }, + { + "name": "Amount", + "type": "INTEGER" + } + ] +} + diff --git a/examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java b/examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java new file mode 100755 index 0000000000..806bf2e3fc --- /dev/null +++ b/examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java @@ -0,0 +1,124 @@ +package com.example.FileToJdbcApp; + +import com.datatorrent.api.LocalMode; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; +import com.datatorrent.netlet.util.DTThrowable; +import org.apache.hadoop.conf.Configuration; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import javax.validation.ConstraintViolationException; +import java.io.File; +import java.io.IOException; +import java.sql.*; + +/** + * Test the DAG declaration in local mode.
+ * The assumption to run this test case is that test_jdbc_table + * and meta-table are created already. + */ +public class ApplicationTest { + private static final String DB_DRIVER = "org.hsqldb.jdbcDriver"; + private static final String DB_URL = "jdbc:hsqldb:mem:test;sql.syntax_mys=true"; + private static final String TABLE_NAME = "test_jdbc_table"; + + @BeforeClass + public static void setup() { + try { + Class.forName(DB_DRIVER).newInstance(); + + Connection con = DriverManager.getConnection(DB_URL); + Statement stmt = con.createStatement(); + + String createMetaTable = "CREATE TABLE IF NOT EXISTS " + JdbcTransactionalStore.DEFAULT_META_TABLE + " ( " + + JdbcTransactionalStore.DEFAULT_APP_ID_COL + " VARCHAR(100) NOT NULL, " + + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + " INT NOT NULL, " + + JdbcTransactionalStore.DEFAULT_WINDOW_COL + " BIGINT NOT NULL, " + + "UNIQUE (" + JdbcTransactionalStore.DEFAULT_APP_ID_COL + ", " + + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + ", " + JdbcTransactionalStore.DEFAULT_WINDOW_COL + ") " + + ")"; + stmt.executeUpdate(createMetaTable); + + String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; + stmt.executeUpdate(createTable); + + } catch (Throwable e) { + DTThrowable.rethrow(e); + } + } + + public static void cleanTable() + { + try { + Connection con = DriverManager.getConnection(DB_URL); + Statement stmt = con.createStatement(); + String cleanTable = "delete from " + TABLE_NAME; + stmt.executeUpdate(cleanTable); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + public int getNumOfEventsInStore() + { + Connection con; + try { + con = DriverManager.getConnection(DB_URL); + Statement stmt = con.createStatement(); + + String countQuery = "SELECT count(*) from " + TABLE_NAME; + ResultSet resultSet = stmt.executeQuery(countQuery); + resultSet.next(); + return resultSet.getInt(1); + } catch (SQLException e) { + throw new 
RuntimeException("fetching count", e); + } + } + + @Test + public void testCsvParserApp() throws IOException, Exception { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(new File("src/test/resources/test.xml").toURI().toURL()); + + lma.prepareDAG(new FileToJdbcCsvParser(), conf); + LocalMode.Controller lc = lma.getController(); + lc.runAsync(); // test will terminate after results are available + + // wait for records to be added to table + Thread.sleep(5000); + + Assert.assertEquals("Events in store", 10, getNumOfEventsInStore()); + cleanTable(); + + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } + + @Test + public void testCustomParserApp() throws IOException, Exception { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(new File("src/test/resources/test.xml").toURI().toURL()); + + lma.prepareDAG(new FileToJdbcCustomParser(), conf); + LocalMode.Controller lc = lma.getController(); + lc.runAsync(); // test will terminate after results are available + + // wait for records to be added to table + Thread.sleep(5000); + + Assert.assertEquals("Events in store", 10, getNumOfEventsInStore()); + cleanTable(); + + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } +} + diff --git a/examples/fileToJdbc/src/test/resources/example.sql b/examples/fileToJdbc/src/test/resources/example.sql new file mode 100644 index 0000000000..446124729d --- /dev/null +++ b/examples/fileToJdbc/src/test/resources/example.sql @@ -0,0 +1,8 @@ +CREATE DATABASE IF NOT EXISTS testJdbc; + +USE testJdbc; + +CREATE TABLE IF NOT EXISTS `test_jdbc_table` ( + `ACCOUNT_NO` int(11) NOT NULL, + `NAME` varchar(255), + `AMOUNT` int(11)); diff --git a/examples/fileToJdbc/src/test/resources/log4j.properties 
b/examples/fileToJdbc/src/test/resources/log4j.properties new file mode 100755 index 0000000000..3bfcdc5517 --- /dev/null +++ b/examples/fileToJdbc/src/test/resources/log4j.properties @@ -0,0 +1,21 @@ +log4j.rootLogger=DEBUG,CONSOLE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout +log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n + +log4j.appender.RFA=org.apache.log4j.RollingFileAppender +log4j.appender.RFA.layout=org.apache.log4j.PatternLayout +log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n +log4j.appender.RFA.File=/tmp/app.log + +# to enable, add SYSLOG to rootLogger +log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender +log4j.appender.SYSLOG.syslogHost=127.0.0.1 +log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout +log4j.appender.SYSLOG.layout.conversionPattern=${dt.cid} %-5p [%t] %c{2} %x - %m%n +log4j.appender.SYSLOG.Facility=LOCAL1 + +log4j.logger.org=info +#log4j.logger.org.apache.commons.beanutils=warn +log4j.logger.com.datatorrent=debug diff --git a/examples/fileToJdbc/src/test/resources/test-input/sample.txt b/examples/fileToJdbc/src/test/resources/test-input/sample.txt new file mode 100644 index 0000000000..362253ee6e --- /dev/null +++ b/examples/fileToJdbc/src/test/resources/test-input/sample.txt @@ -0,0 +1,10 @@ +1,User1,1000 +2,User2,2000 +3,User3,3000 +4,User4,4000 +5,User5,5000 +6,User6,6000 +7,User7,7000 +8,User8,8000 +9,User9,9000 +10,User10,10000 diff --git a/examples/fileToJdbc/src/test/resources/test.xml b/examples/fileToJdbc/src/test/resources/test.xml new file mode 100755 index 0000000000..c3a49c4c4a --- /dev/null +++ b/examples/fileToJdbc/src/test/resources/test.xml @@ -0,0 +1,58 @@ + + + + dt.operator.JdbcOutput.prop.store.databaseDriver + org.hsqldb.jdbcDriver + + + + dt.operator.JdbcOutput.prop.store.databaseUrl + jdbc:hsqldb:mem:test;sql.syntax_mys=true + + + + 
dt.operator.JdbcOutput.prop.store.userName + sa + + + + dt.operator.JdbcOutput.prop.store.password + + + + + dt.operator.JdbcOutput.prop.batchSize + 5 + + + + dt.operator.JdbcOutput.prop.tablename + test_jdbc_table + + + + dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS + com.example.FileToJdbcApp.PojoEvent + + + + dt.operator.FileReader.prop.directory + src/test/resources/test-input + + + + dt.loggers.level + com.datatorrent.*:INFO,org.apache.*:INFO + + + + dt.application.FileToJdbcCsvParser.operator.CsvParser.port.out.attr.TUPLE_CLASS + com.example.FileToJdbcApp.PojoEvent + + + + dt.application.FileToJdbcCustomParser.operator.CustomParser.prop.regexStr + , + + + From eae0eeee5e88000c0fabf5aedf3078e33f863cd9 Mon Sep 17 00:00:00 2001 From: devtagare Date: Thu, 13 Apr 2017 15:59:03 -0700 Subject: [PATCH 2/4] JdbcInput and HDFS output example app SPOI-8251 jdbc to jdbc app --- examples/jdbcIngest/.gitignore | 1 + examples/jdbcIngest/README.md | 65 ++++ .../XmlJavadocCommentsExtractor.xsl | 44 +++ examples/jdbcIngest/pom.xml | 298 ++++++++++++++++ .../jdbcIngest/src/assemble/appPackage.xml | 43 +++ .../mydtapp/FileLineOutputOperator.java | 36 ++ .../java/com/example/mydtapp/JdbcHDFSApp.java | 75 ++++ .../mydtapp/JdbcPollerApplication.java | 48 +++ .../java/com/example/mydtapp/PojoEvent.java | 44 +++ .../META-INF/properties-PollJdbcToHDFSApp.xml | 73 ++++ .../properties-SimpleJdbcToHDFSApp.xml | 66 ++++ .../com/example/mydtapp/ApplicationTest.java | 56 +++ .../com/example/mydtapp/JdbcInputAppTest.java | 137 ++++++++ .../mydtapp/JdbcPollerApplicationTest.java | 128 +++++++ .../jdbcIngest/src/test/resources/example.sql | 24 ++ .../src/test/resources/log4j.properties | 21 ++ examples/jdbcToJdbc/.gitignore | 1 + examples/jdbcToJdbc/README.md | 55 +++ .../XmlJavadocCommentsExtractor.xsl | 44 +++ examples/jdbcToJdbc/pom.xml | 319 ++++++++++++++++++ .../jdbcToJdbc/src/assemble/appPackage.xml | 43 +++ .../com/example/mydtapp/JdbcToJdbcApp.java | 101 ++++++ 
.../java/com/example/mydtapp/PojoEvent.java | 44 +++ .../main/resources/META-INF/properties.xml | 88 +++++ .../com/example/mydtapp/ApplicationTest.java | 42 +++ .../com/example/mydtapp/JdbcOperatorTest.java | 155 +++++++++ .../jdbcToJdbc/src/test/resources/example.sql | 36 ++ .../src/test/resources/log4j.properties | 21 ++ 28 files changed, 2108 insertions(+) create mode 100644 examples/jdbcIngest/.gitignore create mode 100644 examples/jdbcIngest/README.md create mode 100644 examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl create mode 100644 examples/jdbcIngest/pom.xml create mode 100644 examples/jdbcIngest/src/assemble/appPackage.xml create mode 100644 examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java create mode 100644 examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java create mode 100644 examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java create mode 100644 examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java create mode 100644 examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml create mode 100644 examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml create mode 100644 examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java create mode 100644 examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java create mode 100644 examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java create mode 100644 examples/jdbcIngest/src/test/resources/example.sql create mode 100644 examples/jdbcIngest/src/test/resources/log4j.properties create mode 100644 examples/jdbcToJdbc/.gitignore create mode 100644 examples/jdbcToJdbc/README.md create mode 100644 examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl create mode 100644 examples/jdbcToJdbc/pom.xml create mode 100644 examples/jdbcToJdbc/src/assemble/appPackage.xml create mode 100644 
examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java create mode 100644 examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java create mode 100644 examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml create mode 100644 examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java create mode 100644 examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java create mode 100644 examples/jdbcToJdbc/src/test/resources/example.sql create mode 100644 examples/jdbcToJdbc/src/test/resources/log4j.properties diff --git a/examples/jdbcIngest/.gitignore b/examples/jdbcIngest/.gitignore new file mode 100644 index 0000000000..b83d22266a --- /dev/null +++ b/examples/jdbcIngest/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/examples/jdbcIngest/README.md b/examples/jdbcIngest/README.md new file mode 100644 index 0000000000..ec01985957 --- /dev/null +++ b/examples/jdbcIngest/README.md @@ -0,0 +1,65 @@ +## Sample mysql implementation + +This project contains two applications to read records from a table in `MySQL`, create POJOs and write them to a file +in the user specified directory in HDFS. + +1. SimpleJdbcToHDFSApp: Reads table records as per given query and emits them as POJOs. +2. PollJdbcToHDFSApp: Reads table records using partitions in parallel fashion also polls for newly **appended** records and emits them as POJOs. 
+ +Follow these steps to run these applications: + +**Step 1**: Update these properties in the file `src/main/resources/META-INF/properties-.xml`: + +| Property Name | Description | +| ------------- | ----------- | +| dt.application..operator.JdbcInput.prop.store.databaseUrl | database URL of the form `jdbc:mysql://hostName:portNumber/dbName` | +| dt.application..operator.JdbcInput.prop.store.userName | MySQL user name | +| dt.application..operator.JdbcInput.prop.store.password | MySQL user password | +| dt.application..operator.FileOutputOperator.filePath | HDFS output directory path | + +**Step 2**: Create database table and add entries + +Go to the MySQL console and run (where _{path}_ is a suitable prefix): + + mysql> source {path}/src/test/resources/example.sql + +After this, please verify that `testDev.test_event_table` is created and has 10 rows: + + mysql> select count(*) from testDev.test_event_table; + +----------+ + | count(*) | + +----------+ + | 10 | + +----------+ + +**Step 3**: Create HDFS output directory if not already present (_{path}_ should be the same as specified in `META-INF/properties-.xml`): + + hadoop fs -mkdir -p {path} + +**Step 4**: Build the code: + + shell> mvn clean install + +Upload the `target/jdbcInput-1.0-SNAPSHOT.apa` to the UI console if available or launch it from +the command line using `apexcli`. + +**Step 5**: During launch use `src/main/resources/META-INF/properties-.xml` as a custom configuration file; then verify +that the output directory has the expected output: + + shell> hadoop fs -cat /2_op.dat.* | wc -l + +This should return 10 as the count. 
+ +Sample Output: + + hadoop fs -cat /2_op.dat.0 + PojoEvent [accountNumber=1, name=User1, amount=1000] + PojoEvent [accountNumber=2, name=User2, amount=2000] + PojoEvent [accountNumber=3, name=User3, amount=3000] + PojoEvent [accountNumber=4, name=User4, amount=4000] + PojoEvent [accountNumber=5, name=User5, amount=5000] + PojoEvent [accountNumber=6, name=User6, amount=6000] + PojoEvent [accountNumber=7, name=User7, amount=7000] + PojoEvent [accountNumber=8, name=User8, amount=8000] + PojoEvent [accountNumber=9, name=User9, amount=9000] + PojoEvent [accountNumber=10, name=User10, amount=1000] diff --git a/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl b/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl new file mode 100644 index 0000000000..08075a98d3 --- /dev/null +++ b/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/examples/jdbcIngest/pom.xml b/examples/jdbcIngest/pom.xml new file mode 100644 index 0000000000..f9288b8284 --- /dev/null +++ b/examples/jdbcIngest/pom.xml @@ -0,0 +1,298 @@ + + + 4.0.0 + + com.example + 1.0-SNAPSHOT + jdbcInput + jar + + + JDBC Input Operator + Example Uses of JDBC Input Operator + + + + 3.5.0 + lib/*.jar + 3.6.0 + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + 2.9 + + true + + + + maven-compiler-plugin + 3.3 + + UTF-8 + 1.7 + 1.7 + true + false + true + true + + + + maven-dependency-plugin + 2.8 + + + copy-dependencies + prepare-package + + copy-dependencies + + + target/deps + runtime + + + + + + + maven-assembly-plugin + + + app-package-assembly + package + + single + + + ${project.artifactId}-${project.version}-apexapp + false + + src/assemble/appPackage.xml + + + 0755 + + + + ${apex.apppackage.classpath} + ${apex.version} + ${project.groupId} + ${project.artifactId} + ${project.version} + ${project.name} + ${project.description} + + + + + + + + + maven-antrun-plugin + 1.7 + + + package + + + + + + + run + + + + + createJavadocDirectory 
+ generate-resources + + + + + + + + run + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + attach-artifacts + package + + attach-artifact + + + + + target/${project.artifactId}-${project.version}.apa + apa + + + false + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + xml-doclet + generate-resources + + javadoc + + + com.github.markusbernhardt.xmldoclet.XmlDoclet + -d + ${project.build.directory}/generated-resources/xml-javadoc + -filename + ${project.artifactId}-${project.version}-javadoc.xml + false + + com.github.markusbernhardt + xml-doclet + 1.0.4 + + + + + + + + org.codehaus.mojo + xml-maven-plugin + 1.0 + + + transform-xmljavadoc + generate-resources + + transform + + + + + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + XmlJavadocCommentsExtractor.xsl + ${project.build.directory}/generated-resources/xml-javadoc + + + + + + + maven-resources-plugin + 2.6 + + + copy-resources + process-resources + + copy-resources + + + ${basedir}/target/classes + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + true + + + + + + + + + + + + + + + org.apache.apex + malhar-library + ${malhar.version} + + + * + * + + + + + org.apache.apex + apex-common + ${apex.version} + provided + + + junit + junit + 4.10 + test + + + org.apache.apex + apex-engine + ${apex.version} + test + + + mysql + mysql-connector-java + 5.1.36 + + + org.jooq + jooq + 3.6.4 + + + org.codehaus.janino + janino + 2.7.8 + + + org.hsqldb + hsqldb + 2.3.1 + + + + + diff --git a/examples/jdbcIngest/src/assemble/appPackage.xml b/examples/jdbcIngest/src/assemble/appPackage.xml new file mode 100644 index 0000000000..7ad071c69f --- /dev/null +++ b/examples/jdbcIngest/src/assemble/appPackage.xml @@ -0,0 +1,43 @@ + + appPackage + + jar + + false + + + ${basedir}/target/ + /app + + ${project.artifactId}-${project.version}.jar + 
+ + + ${basedir}/target/deps + /lib + + + ${basedir}/src/site/conf + /conf + + *.xml + + + + ${basedir}/src/main/resources/META-INF + /META-INF + + + ${basedir}/src/main/resources/app + /app + + + ${basedir}/src/main/resources/resources + /resources + + + + + diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java b/examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java new file mode 100644 index 0000000000..e155f2311b --- /dev/null +++ b/examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.example.mydtapp; + +import com.datatorrent.lib.io.fs.AbstractFileOutputOperator; + +public class FileLineOutputOperator extends AbstractFileOutputOperator +{ + @Override + protected String getFileName(Object input) + { + return context.getId() + "_" + "op.dat"; + } + + @Override + protected byte[] getBytesForTuple(Object input) + { + return (input.toString() + "\n").getBytes(); + } +} diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java b/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java new file mode 100644 index 0000000000..5605bcfcd2 --- /dev/null +++ b/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.example.mydtapp; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +import com.google.common.collect.Lists; + +import com.datatorrent.api.Context; +import com.datatorrent.api.DAG; +import com.datatorrent.api.DAG.Locality; +import com.datatorrent.api.StreamingApplication; +import com.datatorrent.api.annotation.ApplicationAnnotation; +import com.datatorrent.lib.db.jdbc.JdbcPOJOInputOperator; +import com.datatorrent.lib.db.jdbc.JdbcStore; +import com.datatorrent.lib.util.FieldInfo; +import com.datatorrent.lib.util.FieldInfo.SupportType; + +@ApplicationAnnotation(name = "SimpleJdbcToHDFSApp") +public class JdbcHDFSApp implements StreamingApplication +{ + @Override + public void populateDAG(DAG dag, Configuration conf) + { + JdbcPOJOInputOperator jdbcInputOperator = dag.addOperator("JdbcInput", new JdbcPOJOInputOperator()); + /** + * The class given below can be updated to the user defined class based on + * input table schema The addField infos method needs to be updated + * accordingly This line can be commented and class can be set from the + * properties file + */ + // dag.setOutputPortAttribute(jdbcInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, PojoEvent.class); + + jdbcInputOperator.setFieldInfos(addFieldInfos()); + + JdbcStore store = new JdbcStore(); + jdbcInputOperator.setStore(store); + + FileLineOutputOperator fileOutput = dag.addOperator("FileOutputOperator", new FileLineOutputOperator()); + + dag.addStream("POJO's", jdbcInputOperator.outputPort, fileOutput.input).setLocality(Locality.CONTAINER_LOCAL); + } + + /** + * This method can be modified to have field mappings based on used defined + * class + */ + private List addFieldInfos() + { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new FieldInfo("ACCOUNT_NO", "accountNumber", SupportType.INTEGER)); + fieldInfos.add(new FieldInfo("NAME", "name", SupportType.STRING)); + fieldInfos.add(new FieldInfo("AMOUNT", "amount", SupportType.INTEGER)); 
+ return fieldInfos; + } + +} diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java b/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java new file mode 100644 index 0000000000..54d71f71bb --- /dev/null +++ b/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java @@ -0,0 +1,48 @@ +package com.example.mydtapp; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.Context.PortContext; +import com.datatorrent.api.DAG; +import com.datatorrent.api.StreamingApplication; +import com.datatorrent.api.annotation.ApplicationAnnotation; +import com.datatorrent.lib.db.jdbc.JdbcPOJOPollInputOperator; +import com.datatorrent.lib.db.jdbc.JdbcStore; +import com.datatorrent.lib.util.FieldInfo; +import com.datatorrent.lib.util.FieldInfo.SupportType; +import com.google.common.collect.Lists; + +@ApplicationAnnotation(name = "PollJdbcToHDFSApp") +public class JdbcPollerApplication implements StreamingApplication +{ + public void populateDAG(DAG dag, Configuration conf) + { + JdbcPOJOPollInputOperator poller = dag.addOperator("JdbcPoller", new JdbcPOJOPollInputOperator()); + + JdbcStore store = new JdbcStore(); + poller.setStore(store); + + poller.setFieldInfos(addFieldInfos()); + + FileLineOutputOperator writer = dag.addOperator("Writer", new FileLineOutputOperator()); + dag.setInputPortAttribute(writer.input, PortContext.PARTITION_PARALLEL, true); + writer.setRotationWindows(60); + + dag.addStream("dbrecords", poller.outputPort, writer.input); + } + + /** + * This method can be modified to have field mappings based on used defined + * class + */ + private List addFieldInfos() + { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new FieldInfo("ACCOUNT_NO", "accountNumber", SupportType.INTEGER)); + fieldInfos.add(new FieldInfo("NAME", "name", SupportType.STRING)); + fieldInfos.add(new FieldInfo("AMOUNT", "amount", SupportType.INTEGER)); 
+ return fieldInfos; + } +} diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java b/examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java new file mode 100644 index 0000000000..f56522b6a5 --- /dev/null +++ b/examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java @@ -0,0 +1,44 @@ +package com.example.mydtapp; + +public class PojoEvent +{ + @Override + public String toString() + { + return "PojoEvent [accountNumber=" + accountNumber + ", name=" + name + ", amount=" + amount + "]"; + } + + private int accountNumber; + private String name; + private int amount; + + public int getAccountNumber() + { + return accountNumber; + } + + public void setAccountNumber(int accountNumber) + { + this.accountNumber = accountNumber; + } + + public String getName() + { + return name; + } + + public void setName(String name) + { + this.name = name; + } + + public int getAmount() + { + return amount; + } + + public void setAmount(int amount) + { + this.amount = amount; + } +} diff --git a/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml b/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml new file mode 100644 index 0000000000..6e7aaf65a2 --- /dev/null +++ b/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml @@ -0,0 +1,73 @@ + + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.partitionCount + 2 + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseDriver + com.mysql.jdbc.Driver + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseUrl + jdbc:mysql://localhost:3306/testDev + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.userName + root + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.password + mysql + + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.batchSize + 300 + + + + + 
dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.key + ACCOUNT_NO + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.columnsExpression + ACCOUNT_NO,NAME,AMOUNT + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.port.outputPort.attr.TUPLE_CLASS + com.example.mydtapp.PojoEvent + + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.tableName + test_event_table + + + + dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.pollInterval + 1000 + + + + + dt.application.PollJdbcToHDFSApp.operator.Writer.filePath + /tmp/test/output + + + + dt.loggers.level + com.datatorrent.*:DEBUG,org.apache.*:INFO + + diff --git a/examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml b/examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml new file mode 100644 index 0000000000..9fce7f8991 --- /dev/null +++ b/examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml @@ -0,0 +1,66 @@ + + + + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseDriver + + org.hsqldb.jdbcDriver + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseUrl + + jdbc:hsqldb:mem:test + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.fetchSize + + 50 + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.query + + select * from test_event_table + + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.tableName + + test_event_table + + + + + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS + + com.example.mydtapp.PojoEvent + + + + + dt.application.SimpleJdbcToHDFSApp.operator.FileOutputOperator.filePath + + /tmp/jdbcApp + + + + dt.application.SimpleJdbcToHDFSApp.operator.FileOutputOperator.rotationWindows + + 5 + + + + diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java 
b/examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java new file mode 100644 index 0000000000..fb78944a93 --- /dev/null +++ b/examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.example.mydtapp; + +import java.io.IOException; + +import javax.validation.ConstraintViolationException; + +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.LocalMode; + +/** + * Test the DAG declaration in local mode.
+ * The assumption to run this test case is that test_event_table is created + * already + */ +public class ApplicationTest +{ + + @Test + @Ignore + public void testApplication() throws IOException, Exception + { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-SimpleJdbcToHDFSApp.xml")); + lma.prepareDAG(new JdbcHDFSApp(), conf); + LocalMode.Controller lc = lma.getController(); + lc.run(10000); // runs for 10 seconds and quits + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } +} diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java b/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java new file mode 100644 index 0000000000..1d95f4de3f --- /dev/null +++ b/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java @@ -0,0 +1,137 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.example.mydtapp; + +import java.io.File; +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Collection; + +import javax.validation.ConstraintViolationException; + +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.LocalMode; + +/** + * Application test for {@link JdbcHDFSApp} + */ +public class JdbcInputAppTest +{ + private static final String DB_DRIVER = "org.hsqldb.jdbcDriver"; + private static final String URL = "jdbc:hsqldb:mem:test;sql.syntax_mys=true"; + private static final String TABLE_NAME = "test_event_table"; + private static final String FILE_NAME = "/tmp/jdbcApp"; + + @BeforeClass + public static void setup() + { + try { + cleanup(); + } catch (Exception e) { + throw new RuntimeException(e); + } + try { + Class.forName(DB_DRIVER).newInstance(); + + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + + String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; + stmt.executeUpdate(createTable); + cleanTable(); + insertEventsInTable(10, 0); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @AfterClass + public static void cleanup() + { + try { + FileUtils.deleteDirectory(new File(FILE_NAME)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void cleanTable() + { + try { + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + String cleanTable = "delete from " + TABLE_NAME; + stmt.executeUpdate(cleanTable); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + public static void 
insertEventsInTable(int numEvents, int offset) + { + try { + Connection con = DriverManager.getConnection(URL); + String insert = "insert into " + TABLE_NAME + " values (?,?,?)"; + PreparedStatement stmt = con.prepareStatement(insert); + for (int i = 0; i < numEvents; i++, offset++) { + stmt.setInt(1, offset); + stmt.setString(2, "Account_Holder-" + offset); + stmt.setInt(3, (offset * 1000)); + stmt.executeUpdate(); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testApplication() throws Exception + { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-SimpleJdbcToHDFSApp.xml")); + lma.prepareDAG(new JdbcHDFSApp(), conf); + LocalMode.Controller lc = lma.getController(); + lc.runAsync(); + + // wait for output files to roll + Thread.sleep(5000); + + String[] extensions = { "dat.0", "tmp" }; + Collection list = FileUtils.listFiles(new File(FILE_NAME), extensions, false); + Assert.assertEquals("Records in file", 10, FileUtils.readLines(list.iterator().next()).size()); + + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } +} diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java b/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java new file mode 100644 index 0000000000..b96d4aeaab --- /dev/null +++ b/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java @@ -0,0 +1,128 @@ +package com.example.mydtapp; + +import java.io.File; +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Collection; + +import javax.validation.ConstraintViolationException; + +import 
org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.datatorrent.api.LocalMode; + +public class JdbcPollerApplicationTest +{ + private static final String DB_DRIVER = "org.hsqldb.jdbcDriver"; + private static final String URL = "jdbc:hsqldb:mem:test;sql.syntax_mys=true"; + private static final String TABLE_NAME = "test_event_table"; + private static final String OUTPUT_DIR_NAME = "/tmp/test/output"; + + @BeforeClass + public static void setup() + { + try { + cleanup(); + } catch (Exception e) { + throw new RuntimeException(e); + } + try { + Class.forName(DB_DRIVER).newInstance(); + + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + + String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; + stmt.executeUpdate(createTable); + cleanTable(); + insertEventsInTable(10, 0); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @AfterClass + public static void cleanup() + { + try { + FileUtils.deleteDirectory(new File(OUTPUT_DIR_NAME)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void cleanTable() + { + try { + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + String cleanTable = "delete from " + TABLE_NAME; + stmt.executeUpdate(cleanTable); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + public static void insertEventsInTable(int numEvents, int offset) + { + try { + Connection con = DriverManager.getConnection(URL); + String insert = "insert into " + TABLE_NAME + " values (?,?,?)"; + PreparedStatement stmt = con.prepareStatement(insert); + for (int i = 0; i < numEvents; i++, offset++) { + stmt.setInt(1, offset); + stmt.setString(2, "Account_Holder-" + offset); + stmt.setInt(3, 
(offset * 1000)); + stmt.executeUpdate(); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testApplication() throws Exception + { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseUrl", URL); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseDriver", DB_DRIVER); + conf.setInt("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.partitionCount", 2); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.key", "ACCOUNT_NO"); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.columnsExpression", "ACCOUNT_NO,NAME,AMOUNT"); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.tableName", TABLE_NAME); + conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.port.outputPort.attr.TUPLE_CLASS", + "com.example.mydtapp.PojoEvent"); + conf.set("dt.application.PollJdbcToHDFSApp.operator.Writer.filePath", OUTPUT_DIR_NAME); + + lma.prepareDAG(new JdbcPollerApplication(), conf); + LocalMode.Controller lc = lma.getController(); + lc.runAsync(); + + // wait for output files to roll + Thread.sleep(5000); + + String[] extensions = { "dat.0", "tmp" }; + Collection list = FileUtils.listFiles(new File(OUTPUT_DIR_NAME), extensions, false); + int recordsCount = 0; + for (File file : list) { + recordsCount += FileUtils.readLines(file).size(); + } + Assert.assertEquals("Records in file", 10, recordsCount); + + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } +} diff --git a/examples/jdbcIngest/src/test/resources/example.sql b/examples/jdbcIngest/src/test/resources/example.sql new file mode 100644 index 0000000000..531c659b4d --- /dev/null +++ b/examples/jdbcIngest/src/test/resources/example.sql @@ -0,0 +1,24 @@ +DROP DATABASE IF EXISTS 
testDev; + +CREATE DATABASE testDev; + +USE testDev; + +CREATE TABLE IF NOT EXISTS `test_event_table` ( + `ACCOUNT_NO` int(11) NOT NULL, + `NAME` varchar(255) DEFAULT NULL, + `AMOUNT` int(11) DEFAULT NULL, + primary key(`ACCOUNT_NO`) +) ENGINE=MyISAM DEFAULT CHARSET=latin1; + +INSERT INTO `test_event_table` (`ACCOUNT_NO`, `NAME`, `AMOUNT`) VALUES +(1, 'User1', 1000), +(2, 'User2', 2000), +(3, 'User3', 3000), +(4, 'User4', 4000), +(5, 'User5', 5000), +(6, 'User6', 6000), +(7, 'User7', 7000), +(8, 'User8', 8000), +(9, 'User9', 9000), +(10, 'User10', 1000); diff --git a/examples/jdbcIngest/src/test/resources/log4j.properties b/examples/jdbcIngest/src/test/resources/log4j.properties new file mode 100644 index 0000000000..3bfcdc5517 --- /dev/null +++ b/examples/jdbcIngest/src/test/resources/log4j.properties @@ -0,0 +1,21 @@ +log4j.rootLogger=DEBUG,CONSOLE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout +log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n + +log4j.appender.RFA=org.apache.log4j.RollingFileAppender +log4j.appender.RFA.layout=org.apache.log4j.PatternLayout +log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n +log4j.appender.RFA.File=/tmp/app.log + +# to enable, add SYSLOG to rootLogger +log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender +log4j.appender.SYSLOG.syslogHost=127.0.0.1 +log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout +log4j.appender.SYSLOG.layout.conversionPattern=${dt.cid} %-5p [%t] %c{2} %x - %m%n +log4j.appender.SYSLOG.Facility=LOCAL1 + +log4j.logger.org=info +#log4j.logger.org.apache.commons.beanutils=warn +log4j.logger.com.datatorrent=debug diff --git a/examples/jdbcToJdbc/.gitignore b/examples/jdbcToJdbc/.gitignore new file mode 100644 index 0000000000..b83d22266a --- /dev/null +++ b/examples/jdbcToJdbc/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/examples/jdbcToJdbc/README.md 
b/examples/jdbcToJdbc/README.md new file mode 100644 index 0000000000..562de69258 --- /dev/null +++ b/examples/jdbcToJdbc/README.md @@ -0,0 +1,55 @@ +JdbcToJdbc App + +This application reads from a source table in MySQL, creates POJOs and writes the POJOs to another table in MySQL. + +Steps : + +Step 1 : Update the below properties in the properties file - src/site/conf/example.xml + +1.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseUrl +- database URL of the form jdbc:mysql://hostName:portNumber/dbName +2.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.userName +- MySQL user name +3.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.password +- password +4.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseUrl +- database URL of the form jdbc:mysql://hostName:portNumber/dbName +5.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.userName +- MySQL user name +6.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.password +- password + +Step 2: Create database, table and add entries + +Go to the MySQL console and run the below command, +mysql> source src/test/resources/example.sql + +After this is done, please verify that testDev.test_event_table is created and has 10 rows. It will also create an output table by the name testDev.test_output_event_table + +mysql> select count(*) from testDev.test_event_table; ++----------+ +| count(*) | ++----------+ +| 10 | ++----------+ + +Step 3: Build the code, +shell> mvn clean install + +Upload the target/jdbcToJdbc-1.0-SNAPSHOT.apa to the gateway + +Step 4 : During launch use "Specify custom properties" option and select example.xml + +Verification : + +Log on to the MySQL console + +mysql> select count(*) from testDev.test_output_event_table; ++----------+ +| count(*) | ++----------+ +| 10 | ++----------+ + + + diff --git a/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl b/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl new file mode 100644 index 
0000000000..08075a98d3 --- /dev/null +++ b/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/examples/jdbcToJdbc/pom.xml b/examples/jdbcToJdbc/pom.xml new file mode 100644 index 0000000000..8ed69d8639 --- /dev/null +++ b/examples/jdbcToJdbc/pom.xml @@ -0,0 +1,319 @@ + + + 4.0.0 + + com.example + 1.0-SNAPSHOT + jdbcToJdbc + jar + + + JDBC Input Operator + Example Use of JDBC Input Operator + + + + 3.5.0 + lib/*.jar + 3.6.0 + lib/*.jar + lib/*.jar + + + + + false + + Datatorrent-Releases + DataTorrent Release Repository + https://www.datatorrent.com/maven/content/repositories/releases/ + + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + 2.9 + + true + + + + maven-compiler-plugin + 3.3 + + UTF-8 + 1.7 + 1.7 + true + false + true + true + + + + maven-dependency-plugin + 2.8 + + + copy-dependencies + prepare-package + + copy-dependencies + + + target/deps + runtime + + + + + + + maven-assembly-plugin + + + app-package-assembly + package + + single + + + ${project.artifactId}-${project.version}-apexapp + false + + src/assemble/appPackage.xml + + + 0755 + + + + ${apex.apppackage.classpath} + ${apex.version} + ${project.groupId} + ${project.artifactId} + ${project.version} + ${project.name} + ${project.description} + + + + + + + + + maven-antrun-plugin + 1.7 + + + package + + + + + + + run + + + + + createJavadocDirectory + generate-resources + + + + + + + + run + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 1.9.1 + + + attach-artifacts + package + + attach-artifact + + + + + target/${project.artifactId}-${project.version}.apa + apa + + + false + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + xml-doclet + generate-resources + + javadoc + + + com.github.markusbernhardt.xmldoclet.XmlDoclet + -d + ${project.build.directory}/generated-resources/xml-javadoc + -filename + ${project.artifactId}-${project.version}-javadoc.xml + false + + 
com.github.markusbernhardt + xml-doclet + 1.0.4 + + + + + + + + org.codehaus.mojo + xml-maven-plugin + 1.0 + + + transform-xmljavadoc + generate-resources + + transform + + + + + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + XmlJavadocCommentsExtractor.xsl + ${project.build.directory}/generated-resources/xml-javadoc + + + + + + + maven-resources-plugin + 2.6 + + + copy-resources + process-resources + + copy-resources + + + ${basedir}/target/classes + + + ${project.build.directory}/generated-resources/xml-javadoc + + ${project.artifactId}-${project.version}-javadoc.xml + + true + + + + + + + + + + + + + + + org.apache.apex + malhar-library + ${malhar.version} + + + + * + * + + + + + + org.apache.apex + apex-common + ${apex.version} + provided + + + junit + junit + 4.10 + test + + + + org.apache.apex + apex-engine + ${apex.version} + test + + + + mysql + mysql-connector-java + 5.1.36 + + + + org.codehaus.janino + janino + 2.7.8 + + + + org.codehaus.janino + commons-compiler + 2.7.8 + + + + org.hsqldb + hsqldb + 2.3.1 + test + + + + diff --git a/examples/jdbcToJdbc/src/assemble/appPackage.xml b/examples/jdbcToJdbc/src/assemble/appPackage.xml new file mode 100644 index 0000000000..7ad071c69f --- /dev/null +++ b/examples/jdbcToJdbc/src/assemble/appPackage.xml @@ -0,0 +1,43 @@ + + appPackage + + jar + + false + + + ${basedir}/target/ + /app + + ${project.artifactId}-${project.version}.jar + + + + ${basedir}/target/deps + /lib + + + ${basedir}/src/site/conf + /conf + + *.xml + + + + ${basedir}/src/main/resources/META-INF + /META-INF + + + ${basedir}/src/main/resources/app + /app + + + ${basedir}/src/main/resources/resources + /resources + + + + + diff --git a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java b/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java new file mode 100644 index 0000000000..6dffa8704a --- /dev/null +++ 
b/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.example.mydtapp; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +import com.google.common.collect.Lists; + +import com.datatorrent.api.DAG; +import com.datatorrent.api.DAG.Locality; +import com.datatorrent.api.StreamingApplication; +import com.datatorrent.api.annotation.ApplicationAnnotation; +import com.datatorrent.lib.db.jdbc.JdbcPOJOInputOperator; +import com.datatorrent.lib.db.jdbc.JdbcPOJOInsertOutputOperator; +import com.datatorrent.lib.db.jdbc.JdbcStore; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; +import com.datatorrent.lib.util.FieldInfo; +import com.datatorrent.lib.util.FieldInfo.SupportType; + +@ApplicationAnnotation(name = "JdbcToJdbcApp") +public class JdbcToJdbcApp implements StreamingApplication +{ + @Override + public void populateDAG(DAG dag, Configuration conf) + { + JdbcPOJOInputOperator jdbcInputOperator = dag.addOperator("JdbcInput", new JdbcPOJOInputOperator()); + JdbcStore store = new JdbcStore(); + jdbcInputOperator.setStore(store); + jdbcInputOperator.setFieldInfos(addFieldInfos()); + + 
/** + * The class given below can be updated to the user defined class based on + * input table schema The addField infos method needs to be updated + * accordingly This line can be commented and class can be set from the + * properties file + */ + //dag.setOutputPortAttribute(jdbcInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, PojoEvent.class); + + JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", new JdbcPOJOInsertOutputOperator()); + JdbcTransactionalStore outputStore = new JdbcTransactionalStore(); + jdbcOutputOperator.setStore(outputStore); + jdbcOutputOperator.setFieldInfos(addJdbcFieldInfos()); + + /** + * The class given below can be updated to the user defined class based on + * input table schema The addField infos method needs to be updated + * accordingly This line can be commented and class can be set from the + * properties file + */ + //dag.setInputPortAttribute(jdbcOutputOperator.input, Context.PortContext.TUPLE_CLASS, PojoEvent.class); + + dag.addStream("POJO's", jdbcInputOperator.outputPort, jdbcOutputOperator.input) + .setLocality(Locality.CONTAINER_LOCAL); + } + + /** + * This method can be modified to have field mappings based on used defined + * class
+ * User can choose to have a SQL support type as an additional paramter + */ + private List addJdbcFieldInfos() + { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new com.datatorrent.lib.db.jdbc.JdbcFieldInfo("ACCOUNT_NO", "accountNumber", SupportType.INTEGER,0)); + fieldInfos.add(new com.datatorrent.lib.db.jdbc.JdbcFieldInfo("NAME", "name", SupportType.STRING,0)); + fieldInfos.add(new com.datatorrent.lib.db.jdbc.JdbcFieldInfo("AMOUNT", "amount", SupportType.INTEGER,0)); + return fieldInfos; + } + + /** + * This method can be modified to have field mappings based on used defined + * class + */ + private List addFieldInfos() + { + List fieldInfos = Lists.newArrayList(); + fieldInfos.add(new FieldInfo("ACCOUNT_NO", "accountNumber", SupportType.INTEGER)); + fieldInfos.add(new FieldInfo("NAME", "name", SupportType.STRING)); + fieldInfos.add(new FieldInfo("AMOUNT", "amount", SupportType.INTEGER)); + return fieldInfos; + } + +} diff --git a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java b/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java new file mode 100644 index 0000000000..5154db3802 --- /dev/null +++ b/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java @@ -0,0 +1,44 @@ +package com.example.mydtapp; + +public class PojoEvent +{ + @Override + public String toString() + { + return "TestPOJOEvent [accountNumber=" + accountNumber + ", name=" + name + ", amount=" + amount + "]"; + } + + private int accountNumber; + private String name; + private int amount; + + public int getAccountNumber() + { + return accountNumber; + } + + public void setAccountNumber(int accountNumber) + { + this.accountNumber = accountNumber; + } + + public String getName() + { + return name; + } + + public void setName(String name) + { + this.name = name; + } + + public int getAmount() + { + return amount; + } + + public void setAmount(int amount) + { + this.amount = amount; + } +} diff --git 
a/examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml b/examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml new file mode 100644 index 0000000000..904d297a2a --- /dev/null +++ b/examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml @@ -0,0 +1,88 @@ + + + + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseDriver + + org.hsqldb.jdbcDriver + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseUrl + + jdbc:hsqldb:mem:test + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.fetchSize + + 120 + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS + + com.example.mydtapp.PojoEvent + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.query + + select * from test_event_table + + + + + + dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.tableName + + test_event_table + + + + + dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseDriver + + org.hsqldb.jdbcDriver + + + + + dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseUrl + + jdbc:hsqldb:mem:test + + + + + dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.batchSize + + 5 + + + + + dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.tablename + + test_output_event_table + + + + + dt.application.JdbcToJdbcApp.operator.JdbcOutput.port.input.attr.TUPLE_CLASS + + com.example.mydtapp.PojoEvent + + + + diff --git a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java b/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java new file mode 100644 index 0000000000..ea4c3457e1 --- /dev/null +++ b/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java @@ -0,0 +1,42 @@ +/** + * Put your copyright and license info here. 
+ */ +package com.example.mydtapp; + +import java.io.IOException; + +import javax.validation.ConstraintViolationException; + +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.LocalMode; + +/** + * Test the DAG declaration in local mode.
+ * The assumption to run this test case is that test_event_table,meta-table and + * test_output_event_table are created already + */ +public class ApplicationTest +{ + + @Test + @Ignore + public void testApplication() throws IOException, Exception + { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml")); + lma.prepareDAG(new JdbcToJdbcApp(), conf); + LocalMode.Controller lc = lma.getController(); + lc.run(50000); // runs for 10 seconds and quits + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } + +} diff --git a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java b/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java new file mode 100644 index 0000000000..f4709ba93c --- /dev/null +++ b/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.example.mydtapp; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +import javax.validation.ConstraintViolationException; + +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.LocalMode; +import com.datatorrent.lib.db.jdbc.AbstractJdbcInputOperator; +import com.datatorrent.lib.db.jdbc.AbstractJdbcTransactionableOutputOperator; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; + +/** + * Tests for {@link AbstractJdbcTransactionableOutputOperator} and + * {@link AbstractJdbcInputOperator} + */ +public class JdbcOperatorTest +{ + public static final String DB_DRIVER = "org.hsqldb.jdbcDriver"; + public static final String URL = "jdbc:hsqldb:mem:test;sql.syntax_mys=true"; + + private static final String TABLE_NAME = "test_event_table"; + private static final String OUTPUT_TABLE_NAME = "test_output_event_table"; + + @BeforeClass + public static void setup() + { + + try { + Class.forName(DB_DRIVER).newInstance(); + + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + + String createMetaTable = "CREATE TABLE IF NOT EXISTS " + JdbcTransactionalStore.DEFAULT_META_TABLE + " ( " + + JdbcTransactionalStore.DEFAULT_APP_ID_COL + " VARCHAR(100) NOT NULL, " + + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + " INT NOT NULL, " + + JdbcTransactionalStore.DEFAULT_WINDOW_COL + " BIGINT NOT NULL, " + "UNIQUE (" + + JdbcTransactionalStore.DEFAULT_APP_ID_COL + ", " + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + ", " + + JdbcTransactionalStore.DEFAULT_WINDOW_COL + ") " + ")"; + + System.out.println(createMetaTable); + stmt.executeUpdate(createMetaTable); + + String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT 
INTEGER)"; + stmt.executeUpdate(createTable); + insertEventsInTable(10, 0); + + String createOutputTable = "CREATE TABLE IF NOT EXISTS " + OUTPUT_TABLE_NAME + + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; + stmt.executeUpdate(createOutputTable); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static void cleanTable() + { + try { + Connection con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + String cleanTable = "delete from " + TABLE_NAME; + stmt.executeUpdate(cleanTable); + String cleanOutputTable = "delete from " + OUTPUT_TABLE_NAME; + stmt.executeUpdate(cleanOutputTable); + + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + public static void insertEventsInTable(int numEvents, int offset) + { + try { + Connection con = DriverManager.getConnection(URL); + String insert = "insert into " + TABLE_NAME + " values (?,?,?)"; + PreparedStatement stmt = con.prepareStatement(insert); + for (int i = 0; i < numEvents; i++, offset++) { + stmt.setInt(1, offset); + stmt.setString(2, "Account_Holder-" + offset); + stmt.setInt(3, (offset * 1000)); + stmt.executeUpdate(); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + public int getNumOfEventsInStore() + { + Connection con; + try { + con = DriverManager.getConnection(URL); + Statement stmt = con.createStatement(); + + String countQuery = "SELECT count(*) from " + OUTPUT_TABLE_NAME; + ResultSet resultSet = stmt.executeQuery(countQuery); + resultSet.next(); + return resultSet.getInt(1); + } catch (SQLException e) { + throw new RuntimeException("fetching count", e); + } + } + + @Test + public void testApplication() throws Exception + { + try { + LocalMode lma = LocalMode.newInstance(); + Configuration conf = new Configuration(false); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml")); + lma.prepareDAG(new JdbcToJdbcApp(), conf); + LocalMode.Controller lc = 
lma.getController(); + lc.runAsync(); + + // wait for records to be added to table + Thread.sleep(5000); + + Assert.assertEquals("Events in store", 10, getNumOfEventsInStore()); + cleanTable(); + + } catch (ConstraintViolationException e) { + Assert.fail("constraint violations: " + e.getConstraintViolations()); + } + } +} diff --git a/examples/jdbcToJdbc/src/test/resources/example.sql b/examples/jdbcToJdbc/src/test/resources/example.sql new file mode 100644 index 0000000000..104240c5c5 --- /dev/null +++ b/examples/jdbcToJdbc/src/test/resources/example.sql @@ -0,0 +1,36 @@ +DROP DATABASE IF EXISTS testDev; + +CREATE DATABASE testDev; + +USE testDev; + +CREATE TABLE IF NOT EXISTS `test_event_table` ( + `ACCOUNT_NO` int(11) NOT NULL, + `NAME` varchar(255) DEFAULT NULL, + `AMOUNT` int(11) DEFAULT NULL +) ENGINE=MyISAM DEFAULT CHARSET=latin1; + +INSERT INTO `test_event_table` (`ACCOUNT_NO`, `NAME`, `AMOUNT`) VALUES +(1, 'User1', 1000), +(2, 'User2', 2000), +(3, 'User3', 3000), +(4, 'User4', 4000), +(5, 'User5', 5000), +(6, 'User6', 6000), +(7, 'User7', 7000), +(8, 'User8', 8000), +(9, 'User9', 9000), +(10, 'User10', 1000); + +CREATE TABLE IF NOT EXISTS `test_output_event_table` ( + `ACCOUNT_NO` int(11) NOT NULL, + `NAME` varchar(255) DEFAULT NULL, + `AMOUNT` int(11) DEFAULT NULL +) ENGINE=MyISAM DEFAULT CHARSET=latin1; + +CREATE TABLE IF NOT EXISTS `dt_meta` ( + `dt_app_id` VARCHAR(100) NOT NULL, + `dt_operator_id` INT NOT NULL, + `dt_window` BIGINT NOT NULL, +UNIQUE (`dt_app_id`, `dt_operator_id`, `dt_window`) +) ENGINE=MyISAM DEFAULT CHARSET=latin1; diff --git a/examples/jdbcToJdbc/src/test/resources/log4j.properties b/examples/jdbcToJdbc/src/test/resources/log4j.properties new file mode 100644 index 0000000000..3bfcdc5517 --- /dev/null +++ b/examples/jdbcToJdbc/src/test/resources/log4j.properties @@ -0,0 +1,21 @@ +log4j.rootLogger=DEBUG,CONSOLE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 
+log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n + +log4j.appender.RFA=org.apache.log4j.RollingFileAppender +log4j.appender.RFA.layout=org.apache.log4j.PatternLayout +log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n +log4j.appender.RFA.File=/tmp/app.log + +# to enable, add SYSLOG to rootLogger +log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender +log4j.appender.SYSLOG.syslogHost=127.0.0.1 +log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout +log4j.appender.SYSLOG.layout.conversionPattern=${dt.cid} %-5p [%t] %c{2} %x - %m%n +log4j.appender.SYSLOG.Facility=LOCAL1 + +log4j.logger.org=info +#log4j.logger.org.apache.commons.beanutils=warn +log4j.logger.com.datatorrent=debug From 9b99e0aec48467b22187277c8345d68bda153152 Mon Sep 17 00:00:00 2001 From: Lakshmi Prasanna Velineni Date: Tue, 18 Apr 2017 10:17:19 -0700 Subject: [PATCH 3/4] Fixed tests and POM.Changes related to sql connector. --- examples/fileToJdbc/.gitignore | 3 - examples/fileToJdbc/README.md | 70 ---- .../XmlJavadocCommentsExtractor.xsl | 44 --- examples/fileToJdbc/pom.xml | 293 ---------------- examples/jdbc/README.md | 193 +++++++++++ examples/jdbc/pom.xml | 79 +++++ .../src/assemble/appPackage.xml | 0 .../examples}/FileToJdbcApp/CustomParser.java | 2 +- .../examples}/FileToJdbcApp/FileReader.java | 2 +- .../FileToJdbcApp/FileToJdbcCsvParser.java | 12 +- .../FileToJdbcApp/FileToJdbcCustomParser.java | 2 +- .../examples/FileToJdbcApp}/PojoEvent.java | 3 +- .../JdbcIngest}/FileLineOutputOperator.java | 2 +- .../examples/JdbcIngest}/JdbcHDFSApp.java | 5 +- .../JdbcIngest}/JdbcPollerApplication.java | 5 +- .../apex/examples/JdbcIngest}/PojoEvent.java | 3 +- .../examples/JdbcToJdbc}/JdbcToJdbcApp.java | 2 +- .../apex/examples/JdbcToJdbc}/PojoEvent.java | 2 +- .../META-INF/properties-FileToJdbcApp.xml} | 30 +- .../META-INF/properties-JdbcToJdbcApp.xml} | 34 +- .../META-INF/properties-PollJdbcToHDFSApp.xml | 71 ++++ 
.../properties-SimpleJdbcToHDFSApp.xml | 10 +- .../src/main/resources/schema.json | 0 .../FileToJdbcApp/ApplicationTest.java | 29 +- .../examples/JdbcIngest}/ApplicationTest.java | 2 +- .../JdbcIngest}/JdbcInputAppTest.java | 2 +- .../JdbcPollerApplicationTest.java | 19 +- .../examples/JdbcToJdbc}/ApplicationTest.java | 4 +- .../JdbcToJdbc}/JdbcOperatorTest.java | 23 +- .../test/resources/example-FileToJdbcApp.sql} | 0 .../test/resources/example-JdbcIngest.sql} | 0 .../test/resources/example-JdbcToJdbc.sql} | 0 .../src/test/resources/log4j.properties | 0 .../test/resources/test-FileToJdbcApp.xml} | 8 +- .../test-input/sample-FileToJdbc.txt} | 0 examples/jdbcIngest/.gitignore | 1 - examples/jdbcIngest/README.md | 65 ---- .../XmlJavadocCommentsExtractor.xsl | 44 --- examples/jdbcIngest/pom.xml | 298 ---------------- .../jdbcIngest/src/assemble/appPackage.xml | 43 --- .../META-INF/properties-PollJdbcToHDFSApp.xml | 73 ---- .../src/test/resources/log4j.properties | 21 -- examples/jdbcToJdbc/.gitignore | 1 - examples/jdbcToJdbc/README.md | 55 --- .../XmlJavadocCommentsExtractor.xsl | 44 --- examples/jdbcToJdbc/pom.xml | 319 ------------------ .../jdbcToJdbc/src/assemble/appPackage.xml | 43 --- .../src/test/resources/log4j.properties | 21 -- examples/pom.xml | 2 + 49 files changed, 457 insertions(+), 1527 deletions(-) delete mode 100755 examples/fileToJdbc/.gitignore delete mode 100755 examples/fileToJdbc/README.md delete mode 100755 examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl delete mode 100755 examples/fileToJdbc/pom.xml create mode 100644 examples/jdbc/README.md create mode 100644 examples/jdbc/pom.xml rename examples/{fileToJdbc => jdbc}/src/assemble/appPackage.xml (100%) mode change 100755 => 100644 rename examples/{fileToJdbc/src/main/java/com/example => jdbc/src/main/java/org/apache/apex/examples}/FileToJdbcApp/CustomParser.java (98%) rename examples/{fileToJdbc/src/main/java/com/example => 
jdbc/src/main/java/org/apache/apex/examples}/FileToJdbcApp/FileReader.java (90%) rename examples/{fileToJdbc/src/main/java/com/example => jdbc/src/main/java/org/apache/apex/examples}/FileToJdbcApp/FileToJdbcCsvParser.java (97%) rename examples/{fileToJdbc/src/main/java/com/example => jdbc/src/main/java/org/apache/apex/examples}/FileToJdbcApp/FileToJdbcCustomParser.java (97%) rename examples/{jdbcIngest/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp}/PojoEvent.java (93%) mode change 100644 => 100755 rename examples/{jdbcIngest/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/JdbcIngest}/FileLineOutputOperator.java (96%) rename examples/{jdbcIngest/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/JdbcIngest}/JdbcHDFSApp.java (97%) rename examples/{jdbcIngest/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/JdbcIngest}/JdbcPollerApplication.java (97%) rename examples/{fileToJdbc/src/main/java/com/example/FileToJdbcApp => jdbc/src/main/java/org/apache/apex/examples/JdbcIngest}/PojoEvent.java (93%) mode change 100755 => 100644 rename examples/{jdbcToJdbc/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc}/JdbcToJdbcApp.java (98%) rename examples/{jdbcToJdbc/src/main/java/com/example/mydtapp => jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc}/PojoEvent.java (93%) rename examples/{fileToJdbc/src/main/resources/META-INF/properties.xml => jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml} (63%) rename examples/{jdbcToJdbc/src/main/resources/META-INF/properties.xml => jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml} (60%) create mode 100644 examples/jdbc/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml rename examples/{jdbcIngest => jdbc}/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml (89%) rename examples/{fileToJdbc => 
jdbc}/src/main/resources/schema.json (100%) rename examples/{fileToJdbc/src/test/java/com/example => jdbc/src/test/java/org/apache/apex/examples}/FileToJdbcApp/ApplicationTest.java (91%) rename examples/{jdbcIngest/src/test/java/com/example/mydtapp => jdbc/src/test/java/org/apache/apex/examples/JdbcIngest}/ApplicationTest.java (97%) rename examples/{jdbcIngest/src/test/java/com/example/mydtapp => jdbc/src/test/java/org/apache/apex/examples/JdbcIngest}/JdbcInputAppTest.java (98%) rename examples/{jdbcIngest/src/test/java/com/example/mydtapp => jdbc/src/test/java/org/apache/apex/examples/JdbcIngest}/JdbcPollerApplicationTest.java (92%) rename examples/{jdbcToJdbc/src/test/java/com/example/mydtapp => jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc}/ApplicationTest.java (92%) rename examples/{jdbcToJdbc/src/test/java/com/example/mydtapp => jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc}/JdbcOperatorTest.java (90%) rename examples/{fileToJdbc/src/test/resources/example.sql => jdbc/src/test/resources/example-FileToJdbcApp.sql} (100%) rename examples/{jdbcIngest/src/test/resources/example.sql => jdbc/src/test/resources/example-JdbcIngest.sql} (100%) rename examples/{jdbcToJdbc/src/test/resources/example.sql => jdbc/src/test/resources/example-JdbcToJdbc.sql} (100%) rename examples/{fileToJdbc => jdbc}/src/test/resources/log4j.properties (100%) mode change 100755 => 100644 rename examples/{fileToJdbc/src/test/resources/test.xml => jdbc/src/test/resources/test-FileToJdbcApp.xml} (89%) rename examples/{fileToJdbc/src/test/resources/test-input/sample.txt => jdbc/src/test/resources/test-input/sample-FileToJdbc.txt} (100%) delete mode 100644 examples/jdbcIngest/.gitignore delete mode 100644 examples/jdbcIngest/README.md delete mode 100644 examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl delete mode 100644 examples/jdbcIngest/pom.xml delete mode 100644 examples/jdbcIngest/src/assemble/appPackage.xml delete mode 100644 
examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml delete mode 100644 examples/jdbcIngest/src/test/resources/log4j.properties delete mode 100644 examples/jdbcToJdbc/.gitignore delete mode 100644 examples/jdbcToJdbc/README.md delete mode 100644 examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl delete mode 100644 examples/jdbcToJdbc/pom.xml delete mode 100644 examples/jdbcToJdbc/src/assemble/appPackage.xml delete mode 100644 examples/jdbcToJdbc/src/test/resources/log4j.properties diff --git a/examples/fileToJdbc/.gitignore b/examples/fileToJdbc/.gitignore deleted file mode 100755 index 019edc2a0f..0000000000 --- a/examples/fileToJdbc/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.DS_Store -/.idea/ -/target/ diff --git a/examples/fileToJdbc/README.md b/examples/fileToJdbc/README.md deleted file mode 100755 index ad9f6b9523..0000000000 --- a/examples/fileToJdbc/README.md +++ /dev/null @@ -1,70 +0,0 @@ -## Sample File to JDBC Example - -This example shows how to read files from HDFS, parse into POJOs and then insert into a table in MySQL. - -Given various parsing demands, we give two applications under this package, `FileToJdbcCsvParser` and `FileToJdbcCustomParser`. - -`CsvParser` allows you to parse only CSV format input files. For more complex input format, `CustomParser` allows you to set custom regex to parse. - -Accordingly, we have two additional configuration files (`src/site/conf/exampleCsvParser.xml` and `src/site/conf/exampleCustomParser.xml`) besides the common properties file (`/src/main/resources/META-INF/properties.xml`). - -Users can choose which applicaiton and which addtional configuration file to use during launch time. 
- - -####**Update Properties:** - -- Update these common properties in the file `/src/main/resources/META-INF/properties.xml`: - -| Property Name | Description | -| ------------- | ----------- | -| dt.operator.FileReader.prop.directory |HDFS input directory path -|dt.operator.JdbcOutput.prop.store.databaseUrl | database URL of the form `jdbc:mysql://hostName:portNumber/dbName` | -| dt.operator.JdbcOutput.prop.store.userName | MySQL user name | -| dt.operator.JdbcOutput.prop.store.password | MySQL user password | -| dt.operator.JdbcOutput.prop.tablename | MySQL output table name | - -- Using CustomParser: update `regexStr` in file `src/site/conf/exampleCustomParser.xml` - - -####**Sample Input:** - -- To set up MySQL database and create table, check `src/test/resources/example.sql` -- To run this example, create files using this format: - -``` - 1,User1,1000 - 2,User2,2000 - 3,User3,3000 - 4,User4,4000 - 5,User5,5000 - 6,User6,6000 - 7,User7,7000 - 8,User8,8000 - 9,User9,9000 - 10,User10,10000 -``` -- To change input format, update `PojoEvent` class and `addFieldInfos()` method in `src/main/java/com/example/FileToJdbcApp`. If using CsvParser, also update `src/main/resources/schema.json`. 
- -####**Sample Output:** - -- After running successfully, verify -that the database table has the expected output: - -``` - mysql> select * from table_name; - +------------+--------+--------+ - | ACCOUNT_NO | NAME | AMOUNT | - +------------+--------+--------+ - | 1 | User1 | 1000 | - | 2 | User2 | 2000 | - | 3 | User3 | 3000 | - | 4 | User4 | 4000 | - | 5 | User5 | 5000 | - | 6 | User6 | 6000 | - | 7 | User7 | 7000 | - | 8 | User8 | 8000 | - | 9 | User9 | 9000 | - | 10 | User10 | 10000 | - +------------+--------+--------+ - 10 rows in set (0.00 sec) -``` diff --git a/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl b/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl deleted file mode 100755 index 08075a98d3..0000000000 --- a/examples/fileToJdbc/XmlJavadocCommentsExtractor.xsl +++ /dev/null @@ -1,44 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/examples/fileToJdbc/pom.xml b/examples/fileToJdbc/pom.xml deleted file mode 100755 index ae62e0cb83..0000000000 --- a/examples/fileToJdbc/pom.xml +++ /dev/null @@ -1,293 +0,0 @@ - - - 4.0.0 - - com.example - 1.0-SNAPSHOT - FileToJdbcApp - jar - - - File to JDBC - My Apex Application Description - - - - 3.5.0 - lib/*.jar - 3.6.0 - - - - - - org.apache.maven.plugins - maven-eclipse-plugin - 2.9 - - true - - - - maven-compiler-plugin - 3.3 - - UTF-8 - 1.7 - 1.7 - true - false - true - true - - - - maven-dependency-plugin - 2.8 - - - copy-dependencies - prepare-package - - copy-dependencies - - - target/deps - runtime - - - - - - - maven-assembly-plugin - - - app-package-assembly - package - - single - - - ${project.artifactId}-${project.version}-apexapp - false - - src/assemble/appPackage.xml - - - 0755 - - - - ${apex.apppackage.classpath} - ${apex.version} - ${project.groupId} - ${project.artifactId} - ${project.version} - ${project.name} - ${project.description} - - - - - - - - - maven-antrun-plugin - 1.7 - - - package - - - - - - - run - - - - - createJavadocDirectory - generate-resources - - - - - - - - 
run - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.9.1 - - - attach-artifacts - package - - attach-artifact - - - - - target/${project.artifactId}-${project.version}.apa - apa - - - false - - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - - xml-doclet - generate-resources - - javadoc - - - com.github.markusbernhardt.xmldoclet.XmlDoclet - -d ${project.build.directory}/generated-resources/xml-javadoc -filename ${project.artifactId}-${project.version}-javadoc.xml - false - - com.github.markusbernhardt - xml-doclet - 1.0.4 - - - - - - - - org.codehaus.mojo - xml-maven-plugin - 1.0 - - - transform-xmljavadoc - generate-resources - - transform - - - - - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - - XmlJavadocCommentsExtractor.xsl - ${project.build.directory}/generated-resources/xml-javadoc - - - - - - - maven-resources-plugin - 2.6 - - - copy-resources - process-resources - - copy-resources - - - ${basedir}/target/classes - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - - true - - - - - - - - - - - - - - - org.apache.apex - malhar-library - ${malhar.version} - - - org.apache.apex - malhar-contrib - ${malhar.version} - - - org.apache.apex - apex-common - ${apex.version} - provided - - - junit - junit - 4.10 - test - - - org.apache.apex - apex-engine - ${apex.version} - test - - - mysql - mysql-connector-java - 5.1.36 - - - org.codehaus.janino - janino - 2.7.8 - - - org.codehaus.janino - commons-compiler - 2.7.8 - - - net.sf.supercsv - super-csv - 2.4.0 - - - org.hsqldb - hsqldb - 2.3.1 - - - - diff --git a/examples/jdbc/README.md b/examples/jdbc/README.md new file mode 100644 index 0000000000..72c4fbbc91 --- /dev/null +++ b/examples/jdbc/README.md @@ -0,0 +1,193 @@ +## File to JDBC Example (FileToJdbcApp) + +This example shows how to read files from HDFS, parse into POJOs and then 
insert into a table in a database. + +Given various parsing demands, we give two applications under this package, `FileToJdbcCsvParser` and `FileToJdbcCustomParser`. + +`CsvParser` allows you to parse only CSV format input files. For more complex input format, `CustomParser` allows you to set custom regex to parse. + +A sample properties file (`/src/main/resources/META-INF/properties-FileToJdbcApp.xml`) is provided for these applications and would need to be +customized according to the user's environment. + +The applications can then be launched using the apex command line interface and selecting the above configuration file using a parameter during +launch. + +####**Update Properties:** + +- Update these common properties in the file `/src/main/resources/META-INF/properties-FileToJdbcApp.xml`: + +| Property Name | Description | +| ------------- | ----------- | +| dt.operator.FileReader.prop.directory |HDFS input directory path +| dt.operator.JdbcOutput.prop.store.databaseUrl | database URL | +| dt.operator.JdbcOutput.prop.store.userName | database user name | +| dt.operator.JdbcOutput.prop.store.password | database user password | +| dt.operator.JdbcOutput.prop.tablename | database output table name | +| dt.operator.CustomParser.prop.regexStr | update regexStr if needed| + +####**Sample Input:** + +- To set up database and create table, check `src/test/resources/example-FileToJdbcApp-sql.txt` +- To run this example, create files using this format: + +``` + 1,User1,1000 + 2,User2,2000 + 3,User3,3000 + 4,User4,4000 + 5,User5,5000 + 6,User6,6000 + 7,User7,7000 + 8,User8,8000 + 9,User9,9000 + 10,User10,10000 +``` +- To change input format, update `PojoEvent` class and `addFieldInfos()` method in `src/main/java/org/apache/apex/examples/FileToJdbcApp`. +If using CsvParser, also update `src/main/resources/schema.json`. 
+ +####**Sample Output:** + +- After running successfully, verify +that the database table has the expected output: + +``` + mysql> select * from table_name; + +------------+--------+--------+ + | ACCOUNT_NO | NAME | AMOUNT | + +------------+--------+--------+ + | 1 | User1 | 1000 | + | 2 | User2 | 2000 | + | 3 | User3 | 3000 | + | 4 | User4 | 4000 | + | 5 | User5 | 5000 | + | 6 | User6 | 6000 | + | 7 | User7 | 7000 | + | 8 | User8 | 8000 | + | 9 | User9 | 9000 | + | 10 | User10 | 10000 | + +------------+--------+--------+ + 10 rows in set (0.00 sec) +``` + + +## JDBC ingestion examples + +This project contains two applications to read records from a table in database, create POJOs and write them to a file +in the user specified directory in HDFS. + +1. SimpleJdbcToHDFSApp: Reads table records as per given query and emits them as POJOs. +2. PollJdbcToHDFSApp: Reads table records using partitions in parallel fashion also polls for newly **appended** records and emits them as POJOs. + +Follow these steps to run these applications: + +**Step 1**: Update these properties in the file `src/main/resources/META_INF/properties-.xml`, where represents +the application name and is one of two names above: + +| Property Name | Description | +| ------------- | ----------- | +| dt.application..operator.JdbcInput.prop.store.databaseUrl | database URL, for example `jdbc:hsqldb:mem:test` | +| dt.application..operator.JdbcInput.prop.store.userName | database user name | +| dt.application..operator.JdbcInput.prop.store.password | database user password | +| dt.application..operator.FileOutputOperator.filePath | HDFS output directory path | + +**Step 2**: Create database table and add entries + +Go to the database console and run (where _{path}_ is a suitable prefix): + + source {path}/src/test/resources/example.sql + +After this, please verify that `testDev.test_event_table` is created and has 10 rows: + + select count(*) from testDev.test_event_table; + +----------+ + | count(*) | + 
+----------+ + | 10 | + +----------+ + +**Step 3**: Create HDFS output directory if not already present (_{path}_ should be the same as specified in `META_INF/properties-.xml`): + + hadoop fs -mkdir -p {path} + +**Step 4**: Build the code: + + mvn clean install + +**Step 5**: During launch use `src/main/resources/META_INF/properties-.xml` as a custom configuration file; then verify +that the output directory has the expected output: + + hadoop fs -cat /2_op.dat.* | wc -l + +This should return 10 as the count. + +Sample Output: + + hadoop fs -cat /2_op.dat.0 + PojoEvent [accountNumber=1, name=User1, amount=1000] + PojoEvent [accountNumber=2, name=User2, amount=2000] + PojoEvent [accountNumber=3, name=User3, amount=3000] + PojoEvent [accountNumber=4, name=User4, amount=4000] + PojoEvent [accountNumber=5, name=User5, amount=5000] + PojoEvent [accountNumber=6, name=User6, amount=6000] + PojoEvent [accountNumber=7, name=User7, amount=7000] + PojoEvent [accountNumber=8, name=User8, amount=8000] + PojoEvent [accountNumber=9, name=User9, amount=9000] + PojoEvent [accountNumber=10, name=User10, amount=1000] + + +## JdbcToJdbc App + +This application reads from a source table in a database, creates POJO's and writes the POJO's to another table in a database. 
+ +Steps : + +Step 1 : Update the below properties in the properties file - `src/main/resources/META_INF/properties-JdbcToJdbcApp.xml` + +1.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseUrl +- data base URL for your database, for example jdbc:hsqldb:mem:test +2.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.userName +- mysql user name +3.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.password +- password +4.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseUrl +- data base URL for your database, for example jdbc:jdbc:hsqldb:mem:test +5.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.userName +- mysql user name +6.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.password +- password + +Step 2: Create database, table and add entries + +Load into your database the contents of the following sql file + src/test/resources/example-JdbcToJdbc-sql.txt + +After this is done, please verify that testDev.test_event_table is created and has 10 rows.It will also create an output table by the name testDev.test_output_event_table + +select count(*) from testDev.test_event_table; ++----------+ +| count(*) | ++----------+ +| 10 | ++----------+ + +Step 3: Build the code, +shell>mvn clean install + +This will compile the project and create the application package in the target folder. + +Step 4 : Launch the application package with the apex command line interface and +select the above configuration file during launch. 
+ +Verification : + +Log on to the mysql console + +select count(*) from testDev.test_event_table; ++----------+ +| count(*) | ++----------+ +| 10 | ++----------+ + + + diff --git a/examples/jdbc/pom.xml b/examples/jdbc/pom.xml new file mode 100644 index 0000000000..b697a689c5 --- /dev/null +++ b/examples/jdbc/pom.xml @@ -0,0 +1,79 @@ + + + 4.0.0 + + malhar-examples-JDBC + jar + + Apache Apex Malhar JDBC Examples + Demostrates the JDBC Examples + + + org.apache.apex + malhar-examples + 3.8.0-SNAPSHOT + + + + + + + org.apache.apex + apex-engine + ${apex.core.version} + test + + + + org.apache.apex + malhar-contrib + ${project.version} + + + * + * + + + + + + + org.hsqldb + hsqldb + 2.3.1 + + + + org.jooq + jooq + 3.6.4 + + + + org.codehaus.janino + janino + 2.7.8 + + + + org.codehaus.janino + commons-compiler + 2.7.8 + + + + net.sf.supercsv + super-csv + 2.4.0 + + + + org.hsqldb + hsqldb + 2.3.1 + test + + + + diff --git a/examples/fileToJdbc/src/assemble/appPackage.xml b/examples/jdbc/src/assemble/appPackage.xml old mode 100755 new mode 100644 similarity index 100% rename from examples/fileToJdbc/src/assemble/appPackage.xml rename to examples/jdbc/src/assemble/appPackage.xml diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java similarity index 98% rename from examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java index e08925598e..a22acc98a0 100755 --- a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/CustomParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.FileToJdbcApp; import com.datatorrent.api.Context.OperatorContext; import com.datatorrent.api.DefaultInputPort; diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java similarity index 90% rename from examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java index 201c705ec5..4065a5fc83 100755 --- a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileReader.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java @@ -1,4 +1,4 @@ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.FileToJdbcApp; import com.datatorrent.api.DefaultOutputPort; import org.apache.apex.malhar.lib.fs.LineByLineFileInputOperator; diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java similarity index 97% rename from examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java index 23d3f36a11..d60b275756 100755 --- a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCsvParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java @@ -1,4 +1,10 @@ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.FileToJdbcApp; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +import com.google.common.collect.Lists; import com.datatorrent.api.DAG; import com.datatorrent.api.StreamingApplication; @@ -8,10 +14,6 @@ import com.datatorrent.lib.db.jdbc.JdbcFieldInfo; import 
com.datatorrent.lib.db.jdbc.JdbcPOJOInsertOutputOperator; import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; -import com.google.common.collect.Lists; -import org.apache.hadoop.conf.Configuration; - -import java.util.List; import static java.sql.Types.INTEGER; import static java.sql.Types.VARCHAR; diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java similarity index 97% rename from examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java index c13377f492..42d1d235fb 100755 --- a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/FileToJdbcCustomParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java @@ -1,4 +1,4 @@ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.FileToJdbcApp; import com.datatorrent.api.DAG; import com.datatorrent.api.StreamingApplication; diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java old mode 100644 new mode 100755 similarity index 93% rename from examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java index f56522b6a5..6de92744bc --- a/examples/jdbcIngest/src/main/java/com/example/mydtapp/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java @@ -1,4 +1,4 @@ -package com.example.mydtapp; +package org.apache.apex.examples.FileToJdbcApp; public class PojoEvent { @@ -42,3 +42,4 @@ public void setAmount(int amount) this.amount = amount; } } + diff --git 
a/examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/FileLineOutputOperator.java similarity index 96% rename from examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/FileLineOutputOperator.java index e155f2311b..93bd8a41cf 100644 --- a/examples/jdbcIngest/src/main/java/com/example/mydtapp/FileLineOutputOperator.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/FileLineOutputOperator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import com.datatorrent.lib.io.fs.AbstractFileOutputOperator; diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcHDFSApp.java similarity index 97% rename from examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcHDFSApp.java index 5605bcfcd2..5e1efff545 100644 --- a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcHDFSApp.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcHDFSApp.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import java.util.List; @@ -24,7 +24,6 @@ import com.google.common.collect.Lists; -import com.datatorrent.api.Context; import com.datatorrent.api.DAG; import com.datatorrent.api.DAG.Locality; import com.datatorrent.api.StreamingApplication; @@ -60,7 +59,7 @@ public void populateDAG(DAG dag, Configuration conf) } /** - * This method can be modified to have field mappings based on used defined + * This method can be modified to have field mappings based on user defined * class */ private List addFieldInfos() diff --git a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java similarity index 97% rename from examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java index 54d71f71bb..de47ef4cfe 100644 --- a/examples/jdbcIngest/src/main/java/com/example/mydtapp/JdbcPollerApplication.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java @@ -1,9 +1,11 @@ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import java.util.List; import org.apache.hadoop.conf.Configuration; +import com.google.common.collect.Lists; + import com.datatorrent.api.Context.PortContext; import com.datatorrent.api.DAG; import com.datatorrent.api.StreamingApplication; @@ -12,7 +14,6 @@ import com.datatorrent.lib.db.jdbc.JdbcStore; import com.datatorrent.lib.util.FieldInfo; import com.datatorrent.lib.util.FieldInfo.SupportType; -import com.google.common.collect.Lists; @ApplicationAnnotation(name = "PollJdbcToHDFSApp") public class JdbcPollerApplication implements StreamingApplication diff --git a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java 
b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java old mode 100755 new mode 100644 similarity index 93% rename from examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java index 7985b45402..be366b6ac3 --- a/examples/fileToJdbc/src/main/java/com/example/FileToJdbcApp/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java @@ -1,4 +1,4 @@ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.JdbcIngest; public class PojoEvent { @@ -42,4 +42,3 @@ public void setAmount(int amount) this.amount = amount; } } - diff --git a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java similarity index 98% rename from examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java index 6dffa8704a..e85e4affe9 100644 --- a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/JdbcToJdbcApp.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcToJdbc; import java.util.List; diff --git a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java similarity index 93% rename from examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java rename to examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java index 5154db3802..0abf74d8d2 100644 --- a/examples/jdbcToJdbc/src/main/java/com/example/mydtapp/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java @@ -1,4 +1,4 @@ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcToJdbc; public class PojoEvent { diff --git a/examples/fileToJdbc/src/main/resources/META-INF/properties.xml b/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml similarity index 63% rename from examples/fileToJdbc/src/main/resources/META-INF/properties.xml rename to examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml index 4f706c4d3d..29b911b5e0 100755 --- a/examples/fileToJdbc/src/main/resources/META-INF/properties.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml @@ -2,23 +2,25 @@ dt.operator.JdbcOutput.prop.store.databaseDriver - com.mysql.jdbc.Driver + + org.hsqldb.jdbcDriver dt.operator.JdbcOutput.prop.store.databaseUrl - jdbc:mysql://hostName:portNumber/dbName + + jdbc:hsqldb:mem:test - + dt.operator.JdbcOutput.prop.batchSize @@ -32,17 +34,23 @@ dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS - com.example.FileToJdbcApp.PojoEvent + org.apache.apex.examples.FileToJdbcApp.PojoEvent - + - dt.operator.FileReader.prop.directory - input_directory + dt.operator.CsvParser.port.out.attr.TUPLE_CLASS + org.apache.apex.examples.FileToJdbcApp.PojoEvent + + + + dt.operator.CustomParser.prop.regexStr + , - dt.loggers.level - com.datatorrent.*:INFO,org.apache.*:INFO + 
dt.operator.FileReader.prop.directory + input_directory + diff --git a/examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml b/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml similarity index 60% rename from examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml rename to examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml index 904d297a2a..d9199c67e3 100644 --- a/examples/jdbcToJdbc/src/main/resources/META-INF/properties.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml @@ -8,35 +8,35 @@ - dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseDriver - + dt.operator.JdbcInput.prop.store.databaseDriver + org.hsqldb.jdbcDriver - dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseUrl - + dt.operator.JdbcInput.prop.store.databaseUrl + jdbc:hsqldb:mem:test - dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.fetchSize + dt.operator.JdbcInput.prop.fetchSize 120 - dt.application.JdbcToJdbcApp.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS + dt.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS - com.example.mydtapp.PojoEvent + org.apache.apex.examples.JdbcToJdbc.PojoEvent - dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.query + dt.operator.JdbcInput.prop.query select * from test_event_table @@ -44,44 +44,44 @@ - dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.tableName + dt.operator.JdbcInput.prop.tableName test_event_table - dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseDriver - + dt.operator.JdbcOutput.prop.store.databaseDriver + org.hsqldb.jdbcDriver - dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseUrl - + dt.operator.JdbcOutput.prop.store.databaseUrl + jdbc:hsqldb:mem:test - dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.batchSize + dt.operator.JdbcOutput.prop.batchSize 5 - dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.tablename + dt.operator.JdbcOutput.prop.tablename 
test_output_event_table - dt.application.JdbcToJdbcApp.operator.JdbcOutput.port.input.attr.TUPLE_CLASS + dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS - com.example.mydtapp.PojoEvent + org.apache.apex.examples.JdbcToJdbc.PojoEvent diff --git a/examples/jdbc/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml b/examples/jdbc/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml new file mode 100644 index 0000000000..b67f845f20 --- /dev/null +++ b/examples/jdbc/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml @@ -0,0 +1,71 @@ + + + + + dt.application.operator.JdbcPoller.prop.partitionCount + 2 + + + + dt.application.operator.JdbcPoller.prop.store.databaseDriver + + org.hsqldb.jdbcDriver + + + + dt.application.operator.JdbcPoller.prop.store.databaseUrl + + jdbc:hsqldb:mem:test + + + + + + + dt.application.operator.JdbcPoller.prop.batchSize + 50 + + + + + dt.application.operator.JdbcPoller.prop.key + ACCOUNT_NO + + + + dt.application.operator.JdbcPoller.prop.columnsExpression + ACCOUNT_NO,NAME,AMOUNT + + + dt.application.operator.JdbcPoller.port.outputPort.attr.TUPLE_CLASS + org.apache.apex.examples.JdbcIngest.PojoEvent + + + + + dt.application.operator.JdbcPoller.prop.tableName + test_event_table + + + + dt.application.operator.JdbcPoller.prop.pollInterval + 1000 + + + + + dt.application.operator.Writer.filePath + /tmp/test/output + + + diff --git a/examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml b/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml similarity index 89% rename from examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml rename to examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml index 9fce7f8991..589dbcda5f 100644 --- a/examples/jdbcIngest/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml @@ -8,15 +8,15 @@ - 
dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseDriver - + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseDriver + org.hsqldb.jdbcDriver - dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseUrl - + dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.prop.store.databaseUrl + jdbc:hsqldb:mem:test @@ -46,7 +46,7 @@ dt.application.SimpleJdbcToHDFSApp.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS - com.example.mydtapp.PojoEvent + org.apache.apex.examples.JdbcIngest.PojoEvent diff --git a/examples/fileToJdbc/src/main/resources/schema.json b/examples/jdbc/src/main/resources/schema.json similarity index 100% rename from examples/fileToJdbc/src/main/resources/schema.json rename to examples/jdbc/src/main/resources/schema.json diff --git a/examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/FileToJdbcApp/ApplicationTest.java similarity index 91% rename from examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/FileToJdbcApp/ApplicationTest.java index 806bf2e3fc..3024836cc1 100755 --- a/examples/fileToJdbc/src/test/java/com/example/FileToJdbcApp/ApplicationTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/FileToJdbcApp/ApplicationTest.java @@ -1,17 +1,24 @@ -package com.example.FileToJdbcApp; +package org.apache.apex.examples.FileToJdbcApp; + +import java.io.File; +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +import javax.validation.ConstraintViolationException; -import com.datatorrent.api.LocalMode; -import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; -import com.datatorrent.netlet.util.DTThrowable; -import org.apache.hadoop.conf.Configuration; import org.junit.Assert; 
import org.junit.BeforeClass; import org.junit.Test; -import javax.validation.ConstraintViolationException; -import java.io.File; -import java.io.IOException; -import java.sql.*; +import org.apache.hadoop.conf.Configuration; + +import com.datatorrent.api.LocalMode; +import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; +import com.datatorrent.netlet.util.DTThrowable; /** * Test the DAG declaration in local mode.
@@ -82,7 +89,7 @@ public void testCsvParserApp() throws IOException, Exception { try { LocalMode lma = LocalMode.newInstance(); Configuration conf = new Configuration(false); - conf.addResource(new File("src/test/resources/test.xml").toURI().toURL()); + conf.addResource(new File("src/test/resources/test-FileToJdbcApp.xml").toURI().toURL()); lma.prepareDAG(new FileToJdbcCsvParser(), conf); LocalMode.Controller lc = lma.getController(); @@ -104,7 +111,7 @@ public void testCustomParserApp() throws IOException, Exception { try { LocalMode lma = LocalMode.newInstance(); Configuration conf = new Configuration(false); - conf.addResource(new File("src/test/resources/test.xml").toURI().toURL()); + conf.addResource(new File("src/test/resources/test-FileToJdbcApp.xml").toURI().toURL()); lma.prepareDAG(new FileToJdbcCustomParser(), conf); LocalMode.Controller lc = lma.getController(); diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/ApplicationTest.java similarity index 97% rename from examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/ApplicationTest.java index fb78944a93..080eae8a7c 100644 --- a/examples/jdbcIngest/src/test/java/com/example/mydtapp/ApplicationTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/ApplicationTest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import java.io.IOException; diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcInputAppTest.java similarity index 98% rename from examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcInputAppTest.java index 1d95f4de3f..574534fb64 100644 --- a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcInputAppTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcInputAppTest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import java.io.File; import java.io.IOException; diff --git a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplicationTest.java similarity index 92% rename from examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplicationTest.java index b96d4aeaab..91c8f27a4b 100644 --- a/examples/jdbcIngest/src/test/java/com/example/mydtapp/JdbcPollerApplicationTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplicationTest.java @@ -1,4 +1,4 @@ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcIngest; import java.io.File; import java.io.IOException; @@ -11,13 +11,14 @@ import javax.validation.ConstraintViolationException; -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; +import 
org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; + import com.datatorrent.api.LocalMode; public class JdbcPollerApplicationTest @@ -32,6 +33,7 @@ public static void setup() { try { cleanup(); + dropTable(); } catch (Exception e) { throw new RuntimeException(e); } @@ -41,10 +43,9 @@ public static void setup() Connection con = DriverManager.getConnection(URL); Statement stmt = con.createStatement(); - String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + String createTable = "CREATE TABLE " + TABLE_NAME + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; stmt.executeUpdate(createTable); - cleanTable(); insertEventsInTable(10, 0); } catch (Exception e) { throw new RuntimeException(e); @@ -61,12 +62,12 @@ public static void cleanup() } } - public static void cleanTable() + public static void dropTable() { try { Connection con = DriverManager.getConnection(URL); Statement stmt = con.createStatement(); - String cleanTable = "delete from " + TABLE_NAME; + String cleanTable = "DROP TABLE IF EXISTS " + TABLE_NAME; stmt.executeUpdate(cleanTable); } catch (SQLException e) { throw new RuntimeException(e); @@ -103,7 +104,7 @@ public void testApplication() throws Exception conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.columnsExpression", "ACCOUNT_NO,NAME,AMOUNT"); conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.tableName", TABLE_NAME); conf.set("dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.port.outputPort.attr.TUPLE_CLASS", - "com.example.mydtapp.PojoEvent"); + "org.apache.apex.examples.JdbcIngest.PojoEvent"); conf.set("dt.application.PollJdbcToHDFSApp.operator.Writer.filePath", OUTPUT_DIR_NAME); lma.prepareDAG(new JdbcPollerApplication(), conf); @@ -111,7 +112,7 @@ public void testApplication() throws Exception lc.runAsync(); // wait for output files to roll - Thread.sleep(5000); + Thread.sleep(45000); String[] extensions = { "dat.0", "tmp" }; Collection list = 
FileUtils.listFiles(new File(OUTPUT_DIR_NAME), extensions, false); diff --git a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/ApplicationTest.java similarity index 92% rename from examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/ApplicationTest.java index ea4c3457e1..79c7235ade 100644 --- a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/ApplicationTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/ApplicationTest.java @@ -1,7 +1,7 @@ /** * Put your copyright and license info here. */ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcToJdbc; import java.io.IOException; @@ -30,7 +30,7 @@ public void testApplication() throws IOException, Exception try { LocalMode lma = LocalMode.newInstance(); Configuration conf = new Configuration(false); - conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml")); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-JdbcToJdbcApp.xml")); lma.prepareDAG(new JdbcToJdbcApp(), conf); LocalMode.Controller lc = lma.getController(); lc.run(50000); // runs for 10 seconds and quits diff --git a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/JdbcOperatorTest.java similarity index 90% rename from examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java rename to examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/JdbcOperatorTest.java index f4709ba93c..4ecc52db0b 100644 --- a/examples/jdbcToJdbc/src/test/java/com/example/mydtapp/JdbcOperatorTest.java +++ b/examples/jdbc/src/test/java/org/apache/apex/examples/JdbcToJdbc/JdbcOperatorTest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the 
License. */ -package com.example.mydtapp; +package org.apache.apex.examples.JdbcToJdbc; import java.sql.Connection; import java.sql.DriverManager; @@ -53,7 +53,12 @@ public class JdbcOperatorTest @BeforeClass public static void setup() { - + try { + dropTable(); + } catch (Exception e) { + throw new RuntimeException(e); + } + try { Class.forName(DB_DRIVER).newInstance(); @@ -70,12 +75,12 @@ public static void setup() System.out.println(createMetaTable); stmt.executeUpdate(createMetaTable); - String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + String createTable = "CREATE TABLE " + TABLE_NAME + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; stmt.executeUpdate(createTable); insertEventsInTable(10, 0); - String createOutputTable = "CREATE TABLE IF NOT EXISTS " + OUTPUT_TABLE_NAME + String createOutputTable = "CREATE TABLE " + OUTPUT_TABLE_NAME + " (ACCOUNT_NO INTEGER, NAME VARCHAR(255),AMOUNT INTEGER)"; stmt.executeUpdate(createOutputTable); } catch (Exception e) { @@ -83,14 +88,14 @@ public static void setup() } } - public static void cleanTable() + public static void dropTable() { try { Connection con = DriverManager.getConnection(URL); Statement stmt = con.createStatement(); - String cleanTable = "delete from " + TABLE_NAME; + String cleanTable = "DROP TABLE IF EXISTS " + TABLE_NAME; stmt.executeUpdate(cleanTable); - String cleanOutputTable = "delete from " + OUTPUT_TABLE_NAME; + String cleanOutputTable = "DROP TABLE IF EXISTS " + OUTPUT_TABLE_NAME; stmt.executeUpdate(cleanOutputTable); } catch (SQLException e) { @@ -137,7 +142,7 @@ public void testApplication() throws Exception try { LocalMode lma = LocalMode.newInstance(); Configuration conf = new Configuration(false); - conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml")); + conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties-JdbcToJdbcApp.xml")); lma.prepareDAG(new JdbcToJdbcApp(), conf); LocalMode.Controller lc = lma.getController(); 
lc.runAsync(); @@ -146,7 +151,7 @@ public void testApplication() throws Exception Thread.sleep(5000); Assert.assertEquals("Events in store", 10, getNumOfEventsInStore()); - cleanTable(); + dropTable(); } catch (ConstraintViolationException e) { Assert.fail("constraint violations: " + e.getConstraintViolations()); diff --git a/examples/fileToJdbc/src/test/resources/example.sql b/examples/jdbc/src/test/resources/example-FileToJdbcApp.sql similarity index 100% rename from examples/fileToJdbc/src/test/resources/example.sql rename to examples/jdbc/src/test/resources/example-FileToJdbcApp.sql diff --git a/examples/jdbcIngest/src/test/resources/example.sql b/examples/jdbc/src/test/resources/example-JdbcIngest.sql similarity index 100% rename from examples/jdbcIngest/src/test/resources/example.sql rename to examples/jdbc/src/test/resources/example-JdbcIngest.sql diff --git a/examples/jdbcToJdbc/src/test/resources/example.sql b/examples/jdbc/src/test/resources/example-JdbcToJdbc.sql similarity index 100% rename from examples/jdbcToJdbc/src/test/resources/example.sql rename to examples/jdbc/src/test/resources/example-JdbcToJdbc.sql diff --git a/examples/fileToJdbc/src/test/resources/log4j.properties b/examples/jdbc/src/test/resources/log4j.properties old mode 100755 new mode 100644 similarity index 100% rename from examples/fileToJdbc/src/test/resources/log4j.properties rename to examples/jdbc/src/test/resources/log4j.properties diff --git a/examples/fileToJdbc/src/test/resources/test.xml b/examples/jdbc/src/test/resources/test-FileToJdbcApp.xml similarity index 89% rename from examples/fileToJdbc/src/test/resources/test.xml rename to examples/jdbc/src/test/resources/test-FileToJdbcApp.xml index c3a49c4c4a..477cdbf5ef 100755 --- a/examples/fileToJdbc/src/test/resources/test.xml +++ b/examples/jdbc/src/test/resources/test-FileToJdbcApp.xml @@ -32,7 +32,7 @@ dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS - com.example.FileToJdbcApp.PojoEvent + 
org.apache.apex.examples.FileToJdbcApp.PojoEvent @@ -40,14 +40,14 @@ src/test/resources/test-input - + dt.application.FileToJdbcCsvParser.operator.CsvParser.port.out.attr.TUPLE_CLASS - com.example.FileToJdbcApp.PojoEvent + org.apache.apex.examples.FileToJdbcApp.PojoEvent diff --git a/examples/fileToJdbc/src/test/resources/test-input/sample.txt b/examples/jdbc/src/test/resources/test-input/sample-FileToJdbc.txt similarity index 100% rename from examples/fileToJdbc/src/test/resources/test-input/sample.txt rename to examples/jdbc/src/test/resources/test-input/sample-FileToJdbc.txt diff --git a/examples/jdbcIngest/.gitignore b/examples/jdbcIngest/.gitignore deleted file mode 100644 index b83d22266a..0000000000 --- a/examples/jdbcIngest/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/examples/jdbcIngest/README.md b/examples/jdbcIngest/README.md deleted file mode 100644 index ec01985957..0000000000 --- a/examples/jdbcIngest/README.md +++ /dev/null @@ -1,65 +0,0 @@ -## Sample mysql implementation - -This project contains two applications to read records from a table in `MySQL`, create POJOs and write them to a file -in the user specified directory in HDFS. - -1. SimpleJdbcToHDFSApp: Reads table records as per given query and emits them as POJOs. -2. PollJdbcToHDFSApp: Reads table records using partitions in parallel fashion also polls for newly **appended** records and emits them as POJOs. 
- -Follow these steps to run these applications: - -**Step 1**: Update these properties in the file `src/main/resources/META_INF/properties-.xml`: - -| Property Name | Description | -| ------------- | ----------- | -| dt.application..operator.JdbcInput.prop.store.databaseUrl | database URL of the form `jdbc:mysql://hostName:portNumber/dbName` | -| dt.application..operator.JdbcInput.prop.store.userName | MySQL user name | -| dt.application..operator.JdbcInput.prop.store.password | MySQL user password | -| dt.application..operator.FileOutputOperator.filePath | HDFS output directory path | - -**Step 2**: Create database table and add entries - -Go to the MySQL console and run (where _{path}_ is a suitable prefix): - - mysql> source {path}/src/test/resources/example.sql - -After this, please verify that `testDev.test_event_table` is created and has 10 rows: - - mysql> select count(*) from testDev.test_event_table; - +----------+ - | count(*) | - +----------+ - | 10 | - +----------+ - -**Step 3**: Create HDFS output directory if not already present (_{path}_ should be the same as specified in `META_INF/properties-.xml`): - - hadoop fs -mkdir -p {path} - -**Step 4**: Build the code: - - shell> mvn clean install - -Upload the `target/jdbcInput-1.0-SNAPSHOT.apa` to the UI console if available or launch it from -the commandline using `apexcli`. - -**Step 5**: During launch use `src/main/resources/META_INF/properties-.xml` as a custom configuration file; then verify -that the output directory has the expected output: - - shell> hadoop fs -cat /2_op.dat.* | wc -l - -This should return 10 as the count. 
- -Sample Output: - - hadoop fs -cat /2_op.dat.0 - PojoEvent [accountNumber=1, name=User1, amount=1000] - PojoEvent [accountNumber=2, name=User2, amount=2000] - PojoEvent [accountNumber=3, name=User3, amount=3000] - PojoEvent [accountNumber=4, name=User4, amount=4000] - PojoEvent [accountNumber=5, name=User5, amount=5000] - PojoEvent [accountNumber=6, name=User6, amount=6000] - PojoEvent [accountNumber=7, name=User7, amount=7000] - PojoEvent [accountNumber=8, name=User8, amount=8000] - PojoEvent [accountNumber=9, name=User9, amount=9000] - PojoEvent [accountNumber=10, name=User10, amount=1000] diff --git a/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl b/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl deleted file mode 100644 index 08075a98d3..0000000000 --- a/examples/jdbcIngest/XmlJavadocCommentsExtractor.xsl +++ /dev/null @@ -1,44 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/examples/jdbcIngest/pom.xml b/examples/jdbcIngest/pom.xml deleted file mode 100644 index f9288b8284..0000000000 --- a/examples/jdbcIngest/pom.xml +++ /dev/null @@ -1,298 +0,0 @@ - - - 4.0.0 - - com.example - 1.0-SNAPSHOT - jdbcInput - jar - - - JDBC Input Operator - Example Uses of JDBC Input Operator - - - - 3.5.0 - lib/*.jar - 3.6.0 - - - - - - org.apache.maven.plugins - maven-eclipse-plugin - 2.9 - - true - - - - maven-compiler-plugin - 3.3 - - UTF-8 - 1.7 - 1.7 - true - false - true - true - - - - maven-dependency-plugin - 2.8 - - - copy-dependencies - prepare-package - - copy-dependencies - - - target/deps - runtime - - - - - - - maven-assembly-plugin - - - app-package-assembly - package - - single - - - ${project.artifactId}-${project.version}-apexapp - false - - src/assemble/appPackage.xml - - - 0755 - - - - ${apex.apppackage.classpath} - ${apex.version} - ${project.groupId} - ${project.artifactId} - ${project.version} - ${project.name} - ${project.description} - - - - - - - - - maven-antrun-plugin - 1.7 - - - package - - - - - - - run - - - - - 
createJavadocDirectory - generate-resources - - - - - - - - run - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.9.1 - - - attach-artifacts - package - - attach-artifact - - - - - target/${project.artifactId}-${project.version}.apa - apa - - - false - - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - - xml-doclet - generate-resources - - javadoc - - - com.github.markusbernhardt.xmldoclet.XmlDoclet - -d - ${project.build.directory}/generated-resources/xml-javadoc - -filename - ${project.artifactId}-${project.version}-javadoc.xml - false - - com.github.markusbernhardt - xml-doclet - 1.0.4 - - - - - - - - org.codehaus.mojo - xml-maven-plugin - 1.0 - - - transform-xmljavadoc - generate-resources - - transform - - - - - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - - XmlJavadocCommentsExtractor.xsl - ${project.build.directory}/generated-resources/xml-javadoc - - - - - - - maven-resources-plugin - 2.6 - - - copy-resources - process-resources - - copy-resources - - - ${basedir}/target/classes - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - - true - - - - - - - - - - - - - - - org.apache.apex - malhar-library - ${malhar.version} - - - * - * - - - - - org.apache.apex - apex-common - ${apex.version} - provided - - - junit - junit - 4.10 - test - - - org.apache.apex - apex-engine - ${apex.version} - test - - - mysql - mysql-connector-java - 5.1.36 - - - org.jooq - jooq - 3.6.4 - - - org.codehaus.janino - janino - 2.7.8 - - - org.hsqldb - hsqldb - 2.3.1 - - - - - diff --git a/examples/jdbcIngest/src/assemble/appPackage.xml b/examples/jdbcIngest/src/assemble/appPackage.xml deleted file mode 100644 index 7ad071c69f..0000000000 --- a/examples/jdbcIngest/src/assemble/appPackage.xml +++ /dev/null @@ -1,43 +0,0 @@ - - appPackage - - jar - - false - - - ${basedir}/target/ - /app - - 
${project.artifactId}-${project.version}.jar - - - - ${basedir}/target/deps - /lib - - - ${basedir}/src/site/conf - /conf - - *.xml - - - - ${basedir}/src/main/resources/META-INF - /META-INF - - - ${basedir}/src/main/resources/app - /app - - - ${basedir}/src/main/resources/resources - /resources - - - - - diff --git a/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml b/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml deleted file mode 100644 index 6e7aaf65a2..0000000000 --- a/examples/jdbcIngest/src/main/resources/META-INF/properties-PollJdbcToHDFSApp.xml +++ /dev/null @@ -1,73 +0,0 @@ - - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.partitionCount - 2 - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseDriver - com.mysql.jdbc.Driver - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.databaseUrl - jdbc:mysql://localhost:3306/testDev - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.userName - root - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.store.password - mysql - - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.batchSize - 300 - - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.key - ACCOUNT_NO - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.columnsExpression - ACCOUNT_NO,NAME,AMOUNT - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.port.outputPort.attr.TUPLE_CLASS - com.example.mydtapp.PojoEvent - - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.tableName - test_event_table - - - - dt.application.PollJdbcToHDFSApp.operator.JdbcPoller.prop.pollInterval - 1000 - - - - - dt.application.PollJdbcToHDFSApp.operator.Writer.filePath - /tmp/test/output - - - - dt.loggers.level - com.datatorrent.*:DEBUG,org.apache.*:INFO - - diff --git a/examples/jdbcIngest/src/test/resources/log4j.properties 
b/examples/jdbcIngest/src/test/resources/log4j.properties deleted file mode 100644 index 3bfcdc5517..0000000000 --- a/examples/jdbcIngest/src/test/resources/log4j.properties +++ /dev/null @@ -1,21 +0,0 @@ -log4j.rootLogger=DEBUG,CONSOLE - -log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender -log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout -log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n - -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n -log4j.appender.RFA.File=/tmp/app.log - -# to enable, add SYSLOG to rootLogger -log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender -log4j.appender.SYSLOG.syslogHost=127.0.0.1 -log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout -log4j.appender.SYSLOG.layout.conversionPattern=${dt.cid} %-5p [%t] %c{2} %x - %m%n -log4j.appender.SYSLOG.Facility=LOCAL1 - -log4j.logger.org=info -#log4j.logger.org.apache.commons.beanutils=warn -log4j.logger.com.datatorrent=debug diff --git a/examples/jdbcToJdbc/.gitignore b/examples/jdbcToJdbc/.gitignore deleted file mode 100644 index b83d22266a..0000000000 --- a/examples/jdbcToJdbc/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target/ diff --git a/examples/jdbcToJdbc/README.md b/examples/jdbcToJdbc/README.md deleted file mode 100644 index 562de69258..0000000000 --- a/examples/jdbcToJdbc/README.md +++ /dev/null @@ -1,55 +0,0 @@ -JdbcToJdbc App - -This application reads from a source table in MySQL, creates POJO's and writes the POJO's to another table in MySQL. 
- -Steps : - -Step 1 : Update the below properties in the properties file - src/site/conf/example.xml - -1.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.databaseUrl -- data base URL of the form jdbc:mysql://hostName:portNumber/dbName -2.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.userName -- mysql user name -3.dt.application.JdbcToJdbcApp.operator.JdbcInput.prop.store.password -- password -4.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.databaseUrl -- data base URL of the form jdbc:mysql://hostName:portNumber/dbName -5.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.userName -- mysql user name -6.dt.application.JdbcToJdbcApp.operator.JdbcOutput.prop.store.password -- password - -Step 2: Create database, table and add entries - -Go to mysql console and run the below command, -mysql> source src/test/resources/example.sql - -After this is done, please verify that testDev.test_event_table is created and has 10 rows.It will also create an output table by the name testDev.test_output_event_table - -mysql> select count(*) from testDev.test_event_table; -+----------+ -| count(*) | -+----------+ -| 10 | -+----------+ - -Step 3: Build the code, -shell> mvn clean install - -Upload the target/jdbcInput-1.0-SNAPSHOT.apa to the gateway - -Step 4 : During launch use "Specify custom properties" option and select example.xml - -Verification : - -Log on to the mysql console - -mysql> select count(*) from testDev.test_event_table; -+----------+ -| count(*) | -+----------+ -| 10 | -+----------+ - - - diff --git a/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl b/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl deleted file mode 100644 index 08075a98d3..0000000000 --- a/examples/jdbcToJdbc/XmlJavadocCommentsExtractor.xsl +++ /dev/null @@ -1,44 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/examples/jdbcToJdbc/pom.xml b/examples/jdbcToJdbc/pom.xml deleted file mode 100644 index 8ed69d8639..0000000000 --- 
a/examples/jdbcToJdbc/pom.xml +++ /dev/null @@ -1,319 +0,0 @@ - - - 4.0.0 - - com.example - 1.0-SNAPSHOT - jdbcToJdbc - jar - - - JDBC Input Operator - Example Use of JDBC Input Operator - - - - 3.5.0 - lib/*.jar - 3.6.0 - lib/*.jar - lib/*.jar - - - - - false - - Datatorrent-Releases - DataTorrent Release Repository - https://www.datatorrent.com/maven/content/repositories/releases/ - - - - - - - org.apache.maven.plugins - maven-eclipse-plugin - 2.9 - - true - - - - maven-compiler-plugin - 3.3 - - UTF-8 - 1.7 - 1.7 - true - false - true - true - - - - maven-dependency-plugin - 2.8 - - - copy-dependencies - prepare-package - - copy-dependencies - - - target/deps - runtime - - - - - - - maven-assembly-plugin - - - app-package-assembly - package - - single - - - ${project.artifactId}-${project.version}-apexapp - false - - src/assemble/appPackage.xml - - - 0755 - - - - ${apex.apppackage.classpath} - ${apex.version} - ${project.groupId} - ${project.artifactId} - ${project.version} - ${project.name} - ${project.description} - - - - - - - - - maven-antrun-plugin - 1.7 - - - package - - - - - - - run - - - - - createJavadocDirectory - generate-resources - - - - - - - - run - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 1.9.1 - - - attach-artifacts - package - - attach-artifact - - - - - target/${project.artifactId}-${project.version}.apa - apa - - - false - - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - - xml-doclet - generate-resources - - javadoc - - - com.github.markusbernhardt.xmldoclet.XmlDoclet - -d - ${project.build.directory}/generated-resources/xml-javadoc - -filename - ${project.artifactId}-${project.version}-javadoc.xml - false - - com.github.markusbernhardt - xml-doclet - 1.0.4 - - - - - - - - org.codehaus.mojo - xml-maven-plugin - 1.0 - - - transform-xmljavadoc - generate-resources - - transform - - - - - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - 
- XmlJavadocCommentsExtractor.xsl - ${project.build.directory}/generated-resources/xml-javadoc - - - - - - - maven-resources-plugin - 2.6 - - - copy-resources - process-resources - - copy-resources - - - ${basedir}/target/classes - - - ${project.build.directory}/generated-resources/xml-javadoc - - ${project.artifactId}-${project.version}-javadoc.xml - - true - - - - - - - - - - - - - - - org.apache.apex - malhar-library - ${malhar.version} - - - - * - * - - - - - - org.apache.apex - apex-common - ${apex.version} - provided - - - junit - junit - 4.10 - test - - - - org.apache.apex - apex-engine - ${apex.version} - test - - - - mysql - mysql-connector-java - 5.1.36 - - - - org.codehaus.janino - janino - 2.7.8 - - - - org.codehaus.janino - commons-compiler - 2.7.8 - - - - org.hsqldb - hsqldb - 2.3.1 - test - - - - diff --git a/examples/jdbcToJdbc/src/assemble/appPackage.xml b/examples/jdbcToJdbc/src/assemble/appPackage.xml deleted file mode 100644 index 7ad071c69f..0000000000 --- a/examples/jdbcToJdbc/src/assemble/appPackage.xml +++ /dev/null @@ -1,43 +0,0 @@ - - appPackage - - jar - - false - - - ${basedir}/target/ - /app - - ${project.artifactId}-${project.version}.jar - - - - ${basedir}/target/deps - /lib - - - ${basedir}/src/site/conf - /conf - - *.xml - - - - ${basedir}/src/main/resources/META-INF - /META-INF - - - ${basedir}/src/main/resources/app - /app - - - ${basedir}/src/main/resources/resources - /resources - - - - - diff --git a/examples/jdbcToJdbc/src/test/resources/log4j.properties b/examples/jdbcToJdbc/src/test/resources/log4j.properties deleted file mode 100644 index 3bfcdc5517..0000000000 --- a/examples/jdbcToJdbc/src/test/resources/log4j.properties +++ /dev/null @@ -1,21 +0,0 @@ -log4j.rootLogger=DEBUG,CONSOLE - -log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender -log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout -log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n - 
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} %M - %m%n -log4j.appender.RFA.File=/tmp/app.log - -# to enable, add SYSLOG to rootLogger -log4j.appender.SYSLOG=org.apache.log4j.net.SyslogAppender -log4j.appender.SYSLOG.syslogHost=127.0.0.1 -log4j.appender.SYSLOG.layout=org.apache.log4j.PatternLayout -log4j.appender.SYSLOG.layout.conversionPattern=${dt.cid} %-5p [%t] %c{2} %x - %m%n -log4j.appender.SYSLOG.Facility=LOCAL1 - -log4j.logger.org=info -#log4j.logger.org.apache.commons.beanutils=warn -log4j.logger.com.datatorrent=debug diff --git a/examples/pom.xml b/examples/pom.xml index 180d7c96bd..1eb405d65d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -85,6 +85,7 @@ + maven-assembly-plugin @@ -200,6 +201,7 @@ kafka ftp s3 + jdbc From fc234b1df959e96e1ca97ee98d364d39c87bfaaa Mon Sep 17 00:00:00 2001 From: Apex Dev Date: Tue, 18 Apr 2017 13:40:22 -0700 Subject: [PATCH 4/4] APEXMALHAR-2462 Fixed Checkstyle and Log4j Properties. 
--- examples/jdbc/pom.xml | 20 +++++++++ examples/jdbc/src/assemble/appPackage.xml | 20 +++++++++ .../examples/FileToJdbcApp/CustomParser.java | 26 +++++------ .../examples/FileToJdbcApp/FileReader.java | 23 +++++++++- .../FileToJdbcApp/FileToJdbcCsvParser.java | 30 ++++++++++--- .../FileToJdbcApp/FileToJdbcCustomParser.java | 37 ++++++++++++---- .../examples/FileToJdbcApp/PojoEvent.java | 18 ++++++++ .../JdbcIngest/JdbcPollerApplication.java | 18 ++++++++ .../apex/examples/JdbcIngest/PojoEvent.java | 18 ++++++++ .../examples/JdbcToJdbc/JdbcToJdbcApp.java | 2 +- .../apex/examples/JdbcToJdbc/PojoEvent.java | 18 ++++++++ .../META-INF/properties-FileToJdbcApp.xml | 20 +++++++++ .../META-INF/properties-JdbcToJdbcApp.xml | 20 +++++++++ .../META-INF/properties-PollJdbcToHDFSApp.xml | 20 +++++++++ .../properties-SimpleJdbcToHDFSApp.xml | 20 +++++++++ .../FileToJdbcApp/ApplicationTest.java | 44 ++++++++++++++----- .../examples/JdbcIngest/JdbcInputAppTest.java | 4 +- .../JdbcIngest/JdbcPollerApplicationTest.java | 22 +++++++++- .../examples/JdbcToJdbc/ApplicationTest.java | 17 ++++++- .../examples/JdbcToJdbc/JdbcOperatorTest.java | 7 ++- ...cApp.sql => example-FileToJdbcApp-sql.txt} | 0 ...cIngest.sql => example-JdbcIngest-sql.txt} | 0 ...cToJdbc.sql => example-JdbcToJdbc-sql.txt} | 0 .../jdbc/src/test/resources/log4j.properties | 21 +++++++++ .../src/test/resources/test-FileToJdbcApp.xml | 20 +++++++++ 25 files changed, 396 insertions(+), 49 deletions(-) rename examples/jdbc/src/test/resources/{example-FileToJdbcApp.sql => example-FileToJdbcApp-sql.txt} (100%) rename examples/jdbc/src/test/resources/{example-JdbcIngest.sql => example-JdbcIngest-sql.txt} (100%) rename examples/jdbc/src/test/resources/{example-JdbcToJdbc.sql => example-JdbcToJdbc-sql.txt} (100%) diff --git a/examples/jdbc/pom.xml b/examples/jdbc/pom.xml index b697a689c5..b01028a669 100644 --- a/examples/jdbc/pom.xml +++ b/examples/jdbc/pom.xml @@ -1,4 +1,24 @@ + 4.0.0 diff --git 
a/examples/jdbc/src/assemble/appPackage.xml b/examples/jdbc/src/assemble/appPackage.xml index 7ad071c69f..a8708074a2 100644 --- a/examples/jdbc/src/assemble/appPackage.xml +++ b/examples/jdbc/src/assemble/appPackage.xml @@ -1,3 +1,23 @@ + diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java index a22acc98a0..d5fc509d58 100755 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/CustomParser.java @@ -18,15 +18,14 @@ */ package org.apache.apex.examples.FileToJdbcApp; +import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.datatorrent.api.Context.OperatorContext; import com.datatorrent.api.DefaultInputPort; import com.datatorrent.api.DefaultOutputPort; import com.datatorrent.api.annotation.OutputPortFieldAnnotation; import com.datatorrent.common.util.BaseOperator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.regex.Pattern; // parse input line into pojo event public class CustomParser extends BaseOperator @@ -42,12 +41,11 @@ public class CustomParser extends BaseOperator @OutputPortFieldAnnotation(optional = false) public final transient DefaultOutputPort output = new DefaultOutputPort<>(); - public final transient DefaultInputPort - input = new DefaultInputPort() { - - @Override - public void process(String line) - { + public final transient DefaultInputPort input = new DefaultInputPort() + { + @Override + public void process(String line) + { // use custom regex to split line into words final String[] words = regexPattern.split(line); @@ -70,14 +68,16 @@ public void process(String line) pojo.setAmount(0); } output.emit(pojo); - } + } }; - public String getRegexStr() { + public String getRegexStr() + { return this.regexStr; } - public void 
setRegexStr(String regex) { + public void setRegexStr(String regex) + { this.regexStr = regex; } diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java index 4065a5fc83..89515f70e5 100755 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileReader.java @@ -1,10 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ package org.apache.apex.examples.FileToJdbcApp; -import com.datatorrent.api.DefaultOutputPort; import org.apache.apex.malhar.lib.fs.LineByLineFileInputOperator; -public class FileReader extends LineByLineFileInputOperator{ +import com.datatorrent.api.DefaultOutputPort; +public class FileReader extends LineByLineFileInputOperator +{ /** * output in bytes to match CsvParser input type */ diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java index d60b275756..be1db6b50a 100755 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCsvParser.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ package org.apache.apex.examples.FileToJdbcApp; import java.util.List; @@ -19,10 +37,11 @@ import static java.sql.Types.VARCHAR; @ApplicationAnnotation(name = "FileToJdbcCsvParser") -public class FileToJdbcCsvParser implements StreamingApplication{ - +public class FileToJdbcCsvParser implements StreamingApplication +{ @Override - public void populateDAG(DAG dag, Configuration configuration) { + public void populateDAG(DAG dag, Configuration configuration) + { // create operators FileReader fileReader = dag.addOperator("FileReader", FileReader.class); CsvParser csvParser = dag.addOperator("CsvParser", CsvParser.class); @@ -45,9 +64,10 @@ public void populateDAG(DAG dag, Configuration configuration) { * This method can be modified to have field mappings based on used defined * class */ - private List addFieldInfos() { + private List addFieldInfos() + { List fieldInfos = Lists.newArrayList(); - fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER , INTEGER)); + fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); fieldInfos.add(new JdbcFieldInfo("NAME", "name", JdbcFieldInfo.SupportType.STRING, VARCHAR)); fieldInfos.add(new JdbcFieldInfo("AMOUNT", "amount", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); return fieldInfos; diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java index 42d1d235fb..037ca51063 100755 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/FileToJdbcCustomParser.java @@ -1,24 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.apache.apex.examples.FileToJdbcApp; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +import com.google.common.collect.Lists; + import com.datatorrent.api.DAG; import com.datatorrent.api.StreamingApplication; import com.datatorrent.api.annotation.ApplicationAnnotation; import com.datatorrent.lib.db.jdbc.JdbcFieldInfo; import com.datatorrent.lib.db.jdbc.JdbcPOJOInsertOutputOperator; import com.datatorrent.lib.db.jdbc.JdbcTransactionalStore; -import com.google.common.collect.Lists; -import org.apache.hadoop.conf.Configuration; - -import java.util.List; import static java.sql.Types.INTEGER; import static java.sql.Types.VARCHAR; @ApplicationAnnotation(name = "FileToJdbcCustomParser") -public class FileToJdbcCustomParser implements StreamingApplication{ - +public class FileToJdbcCustomParser implements StreamingApplication +{ @Override - public void populateDAG(DAG dag, Configuration configuration) { + public void populateDAG(DAG dag, Configuration configuration) + { // create operators FileReader fileReader = dag.addOperator("FileReader", FileReader.class); CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class); @@ -41,7 +62,7 @@ public void populateDAG(DAG dag, Configuration configuration) { private List 
addFieldInfos() { List fieldInfos = Lists.newArrayList(); - fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER , INTEGER)); + fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); fieldInfos.add(new JdbcFieldInfo("NAME", "name", JdbcFieldInfo.SupportType.STRING, VARCHAR)); fieldInfos.add(new JdbcFieldInfo("AMOUNT", "amount", JdbcFieldInfo.SupportType.INTEGER, INTEGER)); return fieldInfos; diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java index 6de92744bc..2ce2d9f6d0 100755 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/FileToJdbcApp/PojoEvent.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ package org.apache.apex.examples.FileToJdbcApp; public class PojoEvent diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java index de47ef4cfe..4f351ef194 100644 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.apache.apex.examples.JdbcIngest; import java.util.List; diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java index be366b6ac3..5a6646428a 100644 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/PojoEvent.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.apache.apex.examples.JdbcIngest; public class PojoEvent diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java index e85e4affe9..53a41eaa5a 100644 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/JdbcToJdbcApp.java @@ -84,7 +84,7 @@ private List addJdbcFieldInfos() fieldInfos.add(new com.datatorrent.lib.db.jdbc.JdbcFieldInfo("AMOUNT", "amount", SupportType.INTEGER,0)); return fieldInfos; } - + /** * This method can be modified to have field mappings based on user defined * class diff --git a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java index 0abf74d8d2..f40d9302a9 100644 --- a/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java +++ b/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcToJdbc/PojoEvent.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.apache.apex.examples.JdbcToJdbc; public class PojoEvent diff --git a/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml b/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml index 29b911b5e0..53374fc7a8 100755 --- a/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-FileToJdbcApp.xml @@ -1,4 +1,24 @@ + dt.operator.JdbcOutput.prop.store.databaseDriver diff --git a/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml b/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml index d9199c67e3..b32f8cc58e 100644 --- a/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-JdbcToJdbcApp.xml @@ -1,4 +1,24 @@ + diff --git a/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml b/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml index 589dbcda5f..a57dcf735b 100644 --- a/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml +++ b/examples/jdbc/src/main/resources/META-INF/properties-SimpleJdbcToHDFSApp.xml @@ -1,4 +1,24 @@ + dt.operator.JdbcOutput.prop.store.databaseDriver