http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/build-tools/src/patches/OOZIE-1551-4.0.patch
----------------------------------------------------------------------
diff --git a/build-tools/src/patches/OOZIE-1551-4.0.patch b/build-tools/src/patches/OOZIE-1551-4.0.patch
deleted file mode 100644
index 965d475..0000000
--- a/build-tools/src/patches/OOZIE-1551-4.0.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-diff --git a/hadooplibs/hadoop-2/pom.xml b/hadooplibs/hadoop-2/pom.xml
-index 83c209e..e70847e 100644
---- a/hadooplibs/hadoop-2/pom.xml
-+++ b/hadooplibs/hadoop-2/pom.xml
-@@ -27,7 +27,7 @@
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop</artifactId>
-- <version>2.2.0-SNAPSHOT.oozie-4.0.0-falcon</version>
-+ <version>2.2.0.oozie-4.0.0-falcon</version>
- <description>Apache Oozie Hadoop ${project.version}</description>
- <name>Apache Oozie Hadoop ${project.version}</name>
- <packaging>jar</packaging>
-@@ -36,7 +36,7 @@
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-client</artifactId>
-- <version>2.2.0-SNAPSHOT</version>
-+ <version>2.2.0</version>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-diff --git a/hadooplibs/hadoop-distcp-2/pom.xml b/hadooplibs/hadoop-distcp-2/pom.xml
-index b9948fb..d60c561 100644
---- a/hadooplibs/hadoop-distcp-2/pom.xml
-+++ b/hadooplibs/hadoop-distcp-2/pom.xml
-@@ -27,7 +27,7 @@
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-distcp</artifactId>
-- <version>2.2.0-SNAPSHOT.oozie-4.0.0-falcon</version>
-+ <version>2.2.0.oozie-4.0.0-falcon</version>
- <description>Apache Oozie Hadoop Distcp ${project.version}</description>
- <name>Apache Oozie Hadoop Distcp ${project.version}</name>
- <packaging>jar</packaging>
-@@ -36,7 +36,7 @@
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-distcp</artifactId>
-- <version>2.2.0-SNAPSHOT</version>
-+ <version>2.2.0</version>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-diff --git a/hadooplibs/hadoop-test-2/pom.xml b/hadooplibs/hadoop-test-2/pom.xml
-index 499871a..3af7e9f 100644
---- a/hadooplibs/hadoop-test-2/pom.xml
-+++ b/hadooplibs/hadoop-test-2/pom.xml
-@@ -27,7 +27,7 @@
- </parent>
- <groupId>org.apache.oozie</groupId>
- <artifactId>oozie-hadoop-test</artifactId>
-- <version>2.2.0-SNAPSHOT.oozie-4.0.0-falcon</version>
-+ <version>2.2.0.oozie-4.0.0-falcon</version>
- <description>Apache Oozie Hadoop ${project.version} Test</description>
- <name>Apache Oozie Hadoop ${project.version} Test</name>
- <packaging>jar</packaging>
-@@ -36,7 +36,7 @@
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-minicluster</artifactId>
-- <version>2.2.0-SNAPSHOT</version>
-+ <version>2.2.0</version>
- <scope>compile</scope>
- </dependency>
- </dependencies>
-diff --git a/pom.xml b/pom.xml
-index 73cedcf..f8fa3b4 100644
---- a/pom.xml
-+++ b/pom.xml
-@@ -1002,8 +1002,8 @@
- <activeByDefault>false</activeByDefault>
- </activation>
- <properties>
-- <hadoop.version>2.2.0-SNAPSHOT</hadoop.version>
-- <hadoop.auth.version>2.2.0-SNAPSHOT</hadoop.auth.version>
-+ <hadoop.version>2.2.0</hadoop.version>
-+ <hadoop.auth.version>2.2.0</hadoop.auth.version>
- <pig.classifier>h2</pig.classifier>
- <sqoop.classifier>hadoop200</sqoop.classifier>
- </properties>
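For context, the deleted patch above re-versions the Oozie hadooplib artifacts from 2.2.0-SNAPSHOT to the released 2.2.0 line. A minimal sketch of a consuming POM fragment follows; the groupId/artifactId/version coordinates are taken verbatim from the hunks above, while the surrounding dependency declaration is purely illustrative and not part of this commit.

    <!-- Hypothetical consumer of the re-versioned Oozie hadooplib; the
         coordinates match the patched hadooplibs/hadoop-2/pom.xml above. -->
    <dependency>
        <groupId>org.apache.oozie</groupId>
        <artifactId>oozie-hadoop</artifactId>
        <version>2.2.0.oozie-4.0.0-falcon</version>
    </dependency>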
http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/build-tools/src/patches/OOZIE-1741.patch ---------------------------------------------------------------------- diff --git a/build-tools/src/patches/OOZIE-1741.patch b/build-tools/src/patches/OOZIE-1741.patch deleted file mode 100644 index e69b2d9..0000000 --- a/build-tools/src/patches/OOZIE-1741.patch +++ /dev/null @@ -1,397 +0,0 @@ -diff --git core/src/main/java/org/apache/oozie/coord/HCatELFunctions.java core/src/main/java/org/apache/oozie/coord/HCatELFunctions.java -index e5f0146..9a36af0 100644 ---- core/src/main/java/org/apache/oozie/coord/HCatELFunctions.java -+++ core/src/main/java/org/apache/oozie/coord/HCatELFunctions.java -@@ -115,6 +115,12 @@ public class HCatELFunctions { - return echoUnResolved("dataOutPartitions", "'" + dataOutName + "'"); - } - -+ public static String ph1_coord_dataInPartitions_echo(String dataInName, String type) { -+ // Checking if the dataIn/dataOut is correct? -+ isValidDataEvent(dataInName); -+ return echoUnResolved("dataInPartitions", "'" + dataInName + "', '" + type + "'"); -+ } -+ - public static String ph1_coord_dataOutPartitionValue_echo(String dataOutName, String partition) { - // Checking if the dataIn/dataOut is correct? - isValidDataEvent(dataOutName); -@@ -266,6 +272,47 @@ public class HCatELFunctions { - } - - /** -+ * Used to specify the entire HCat partition defining input for workflow job. <p/> Look for two evaluator-level -+ * variables <p/> A) .datain.<DATAIN_NAME> B) .datain.<DATAIN_NAME>.unresolved <p/> A defines the data-in HCat URI. -+ * <p/> B defines whether there are any unresolved EL-function (i.e latest) <p/> If there are something unresolved, -+ * this function will echo back the original function <p/> otherwise it sends the partition. -+ * -+ * @param dataInName : DataIn name -+ * @param type : for action type: hive-export -+ */ -+ public static String ph3_coord_dataInPartitions(String dataInName, String type) { -+ ELEvaluator eval = ELEvaluator.getCurrent(); -+ String uri = (String) eval.getVariable(".datain." + dataInName); -+ Boolean unresolved = (Boolean) eval.getVariable(".datain." + dataInName + ".unresolved"); -+ if (unresolved != null && unresolved.booleanValue() == true) { -+ return "${coord:dataInPartitions('" + dataInName + "', '" + type + "')}"; -+ } -+ String partitionValue = null; -+ if (uri != null) { -+ if (type.equals("hive-export")) { -+ String[] uriList = uri.split(CoordELFunctions.DIR_SEPARATOR); -+ if (uriList.length > 1) { -+ throw new RuntimeException("Multiple partitions not supported for hive-export type. Dataset name: " -+ + dataInName + " URI: " + uri); -+ } -+ try { -+ partitionValue = new HCatURI(uri).toPartitionValueString(type); -+ } -+ catch (URISyntaxException e) { -+ throw new RuntimeException("Parsing exception for HCatURI " + uri, e); -+ } -+ } else { -+ throw new RuntimeException("Unsupported type: " + type + " dataset name: " + dataInName); -+ } -+ } -+ else { -+ XLog.getLog(HCatELFunctions.class).warn("URI is null"); -+ return null; -+ } -+ return partitionValue; -+ } -+ -+ /** - * Used to specify the MAXIMUM value of an HCat partition which is input dependency for workflow job.<p/> Look for two evaluator-level - * variables <p/> A) .datain.<DATAIN_NAME> B) .datain.<DATAIN_NAME>.unresolved <p/> A defines the current list of - * HCat URIs. 
<p/> B defines whether there are any unresolved EL-function (i.e latest) <p/> If there are something -diff --git core/src/main/resources/oozie-default.xml core/src/main/resources/oozie-default.xml -index 455ef9d..889f10d 100644 ---- core/src/main/resources/oozie-default.xml -+++ core/src/main/resources/oozie-default.xml -@@ -837,6 +837,7 @@ - coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionFilter_echo, - coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMin_echo, - coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitionMax_echo, -+ coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataInPartitions_echo, - coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitions_echo, - coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph1_coord_dataOutPartitionValue_echo - </value> -@@ -1101,6 +1102,7 @@ - coord:dataInPartitionFilter=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionFilter, - coord:dataInPartitionMin=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMin, - coord:dataInPartitionMax=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitionMax, -+ coord:dataInPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataInPartitions, - coord:dataOutPartitions=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitions, - coord:dataOutPartitionValue=org.apache.oozie.coord.HCatELFunctions#ph3_coord_dataOutPartitionValue - </value> -diff --git core/src/test/java/org/apache/oozie/coord/TestHCatELFunctions.java core/src/test/java/org/apache/oozie/coord/TestHCatELFunctions.java -index f46f1ec..fac2177 100644 ---- core/src/test/java/org/apache/oozie/coord/TestHCatELFunctions.java -+++ core/src/test/java/org/apache/oozie/coord/TestHCatELFunctions.java -@@ -264,6 +264,38 @@ public class TestHCatELFunctions extends XHCatTestCase { - } - - /** -+ * Test HCat dataInPartition EL function (phase 1) which echo back the EL -+ * function itself -+ * -+ * @throws Exception -+ */ -+ @Test -+ public void testDataInPartitionsPh1() throws Exception { -+ init("coord-job-submit-data"); -+ String expr = "${coord:dataInPartitions('ABC', 'hive-export')}"; -+ // +ve test -+ eval.setVariable("oozie.dataname.ABC", "data-in"); -+ assertEquals("${coord:dataInPartitions('ABC', 'hive-export')}", CoordELFunctions.evalAndWrap(eval, expr)); -+ // -ve test -+ expr = "${coord:dataInPartitions('ABCD', 'hive-export')}"; -+ try { -+ CoordELFunctions.evalAndWrap(eval, expr); -+ fail("should throw exception because Data-in is not defined"); -+ } -+ catch (Exception ex) { -+ } -+ // -ve test -+ expr = "${coord:dataInPartitions('ABCD')}"; -+ eval.setVariable("oozie.dataname.ABCD", "data-in"); -+ try { -+ CoordELFunctions.evalAndWrap(eval, expr); -+ fail("should throw exception because EL function requires 2 parameters"); -+ } -+ catch (Exception ex) { -+ } -+ } -+ -+ /** - * Test HCat dataOutPartition EL function (phase 1) which echo back the EL - * function itself - * -@@ -463,6 +495,31 @@ public class TestHCatELFunctions extends XHCatTestCase { - assertTrue(res.equals("20")); - } - -+ /** -+ * Test dataInPartitions EL function (phase 3) which returns the complete partition value string of a single partition -+ * in case of hive-export type. 
-+ * -+ * @throws Exception -+ */ -+ @Test -+ public void testDataInPartitions() throws Exception { -+ init("coord-action-start"); -+ String expr = "${coord:dataInPartitions('ABC', 'hive-export')}"; -+ eval.setVariable(".datain.ABC", "hcat://hcat.server.com:5080/mydb/clicks/datastamp=20120230;region=us"); -+ eval.setVariable(".datain.ABC.unresolved", Boolean.FALSE); -+ String res = CoordELFunctions.evalAndWrap(eval, expr); -+ assertTrue(res.equals("datastamp='20120230',region='us'") || res.equals("region='us',datastamp='20120230'")); -+ // -ve test; execute EL function with any other type than hive-export -+ try { -+ expr = "${coord:dataInPartitions('ABC', 'invalid-type')}"; -+ eval.setVariable(".datain.ABC", "hcat://hcat.server.com:5080/mydb/clicks/datastamp=20120230;region=us"); -+ eval.setVariable(".datain.ABC.unresolved", Boolean.FALSE); -+ res = CoordELFunctions.evalAndWrap(eval, expr); -+ fail("EL function should throw exception because of invalid type"); -+ } catch (Exception e) { -+ } -+ } -+ - private void init(String tag) throws Exception { - init(tag, "hdfs://localhost:9000/user/" + getTestUser() + "/US/${YEAR}/${MONTH}/${DAY}"); - } -diff --git docs/src/site/twiki/CoordinatorFunctionalSpec.twiki docs/src/site/twiki/CoordinatorFunctionalSpec.twiki -index a5ecbc5..621bd3d 100644 ---- docs/src/site/twiki/CoordinatorFunctionalSpec.twiki -+++ docs/src/site/twiki/CoordinatorFunctionalSpec.twiki -@@ -2608,6 +2608,192 @@ C = foreach B generate foo, bar; - store C into 'myOutputDatabase.myOutputTable' using org.apache.hcatalog.pig.HCatStorer('region=APAC,datestamp=20090102'); - </blockquote> - -+---++++ 6.8.8 coord:dataInPartitions(String name, String type) EL function -+ -+The =${coord:dataInPartitions(String name, String type)}= EL function resolves to a list of partition key-value -+pairs for the input-event dataset. Currently the only type supported is 'hive-export'. The 'hive-export' type -+supports only one partition instance and it can be used to create the complete partition value string that can -+be used in a hive query for partition export/import. -+ -+The example below illustrates a hive export-import job triggered by a coordinator, using the EL functions for HCat database, -+table, input partitions. The example replicates the hourly processed data across hive tables. 
-+ -+*%GREEN% Example: %ENDCOLOR%* -+ -+#HCatHiveExampleOne -+ -+*Coordinator application definition:* -+ -+<blockquote> -+ <coordinator-app xmlns="uri:oozie:coordinator:0.3" name="app-coord" -+ frequency="${coord:hours(1)}" start="2014-03-28T08:00Z" -+ end="2030-01-01T00:00Z" timezone="UTC"> -+ -+ <datasets> -+ <dataset name="Stats-1" frequency="${coord:hours(1)}" -+ initial-instance="2014-03-28T08:00Z" timezone="UTC"> -+ <uri-template>hcat://foo:11002/myInputDatabase1/myInputTable1/year=${YEAR};month=${MONTH};day=${DAY};hour=${HOUR} -+ </uri-template> -+ </dataset> -+ <dataset name="Stats-2" frequency="${coord:hours(1)}" -+ initial-instance="2014-03-28T08:00Z" timezone="UTC"> -+ <uri-template>hcat://foo:11002/myInputDatabase2/myInputTable2/year=${YEAR};month=${MONTH};day=${DAY};hour=${HOUR} -+ </uri-template> -+ </dataset> -+ </datasets> -+ <input-events> -+ <data-in name="processed-logs-1" dataset="Stats-1"> -+ <instance>${coord:current(0)}</instance> -+ </data-in> -+ </input-events> -+ <output-events> -+ <data-out name="processed-logs-2" dataset="Stats-2"> -+ <instance>${coord:current(0)}</instance> -+ </data-out> -+ </output-events> -+ <action> -+ <workflow> -+ <app-path>hdfs://bar:8020/usr/joe/logsreplicator-wf</app-path> -+ <configuration> -+ <property> -+ <name>EXPORT_DB</name> -+ <value>${coord:databaseIn('processed-logs-1')}</value> -+ </property> -+ <property> -+ <name>EXPORT_TABLE</name> -+ <value>${coord:tableIn('processed-logs-1')}</value> -+ </property> -+ <property> -+ <name>IMPORT_DB</name> -+ <value>${coord:databaseOut('processed-logs-2')}</value> -+ </property> -+ <property> -+ <name>IMPORT_TABLE</name> -+ <value>${coord:tableOut('processed-logs-2')}</value> -+ </property> -+ <property> -+ <name>EXPORT_PARTITION</name> -+ <value>${coord:dataInPartitions('processed-logs-1', 'hive-export')}</value> -+ </property> -+ <property> -+ <name>EXPORT_PATH</name> -+ <value>hdfs://bar:8020/staging/${coord:formatTime(coord:nominalTime(), 'yyyy-MM-dd-HH')}/data</value> -+ </property> -+ </configuration> -+ </workflow> -+ </action> -+</coordinator-app> -+</blockquote> -+ -+Parameterizing the input/output databases and tables using the corresponding EL function as shown will make them -+available in the hive action of the workflow 'logsreplicator-wf'. -+ -+Each coordinator action will use as input events the hourly instances of the 'processed-logs-1' dataset. The -+=${coord:dataInPartitions(String name, String type)}= function enables the coordinator application to pass the -+partition corresponding to hourly dataset instances to the workflow job triggered by the coordinator action. -+The workflow passes this partition value to the hive export script that exports the hourly partition from source -+database to the staging location referred as =EXPORT_PATH=. The hive import script imports the hourly partition from -+=EXPORT_PATH= staging location into the target database. 
-+ -+#HCatWorkflow -+ -+*Workflow definition:* -+ -+<blockquote> -+<workflow-app xmlns="uri:oozie:workflow:0.3" name="logsreplicator-wf"> -+ <start to="table-export"/> -+ <action name="table-export"> -+ <hive:hive xmlns:hive="uri:oozie:hive-action:0.2" xmlns="uri:oozie:hive-action:0.2"> -+ <job-tracker>${jobTracker}</job-tracker> -+ <name-node>${nameNode}</name-node> -+ <job-xml>${wf:appPath()}/conf/hive-site.xml</job-xml> -+ <configuration> -+ <property> -+ <name>mapred.job.queue.name</name> -+ <value>${queueName}</value> -+ </property> -+ <property> -+ <name>oozie.launcher.mapred.job.priority</name> -+ <value>${jobPriority}</value> -+ </property> -+ </configuration> -+ <script>${wf:appPath()}/scripts/table-export.hql</script> -+ <param>sourceDatabase=${EXPORT_DB}</param> -+ <param>sourceTable=${EXPORT_TABLE}</param> -+ <param>sourcePartition=${EXPORT_PARTITION}</param> -+ <param>sourceStagingDir=${EXPORT_PATH}</param> -+ </hive:hive> -+ <ok to="table-import"/> -+ <error to="fail"/> -+ </action> -+ <action name="table-import"> -+ <hive:hive xmlns:hive="uri:oozie:hive-action:0.2" xmlns="uri:oozie:hive-action:0.2"> -+ <job-tracker>${jobTracker}</job-tracker> -+ <name-node>${nameNode}</name-node> -+ <job-xml>${wf:appPath()}/conf/hive-site.xml</job-xml> -+ <configuration> -+ <property> -+ <name>mapred.job.queue.name</name> -+ <value>${queueName}</value> -+ </property> -+ <property> -+ <name>oozie.launcher.mapred.job.priority</name> -+ <value>${jobPriority}</value> -+ </property> -+ </configuration> -+ <script>${wf:appPath()}/scripts/table-import.hql</script> -+ <param>targetDatabase=${IMPORT_DB}</param> -+ <param>targetTable=${IMPORT_TABLE}</param> -+ <param>targetPartition=${EXPORT_PARTITION}</param> -+ <param>sourceStagingDir=${EXPORT_PATH}</param> -+ </hive:hive> -+ <ok to="end"/> -+ <error to="fail"/> -+ </action> -+ <kill name="fail"> -+ <message> -+ Workflow failed, error message[${wf:errorMessage(wf:lastErrorNode())}] -+ </message> -+ </kill> -+ <end name="end"/> -+</workflow-app> -+</blockquote> -+ -+Ensure that the following jars are in classpath, with versions corresponding to hcatalog installation: -+hcatalog-core.jar, webhcat-java-client.jar, hive-common.jar, hive-exec.jar, hive-metastore.jar, hive-serde.jar, -+ libfb303.jar. The hive-site.xml needs to be present in classpath as well. -+ -+*Example Hive Export script:* -+The following script exports a particular Hive table partition into staging location, where the partition value -+ is computed through =${coord:dataInPartitions(String name, String type)}= EL function. -+<blockquote> -+export table ${sourceDatabase}.${sourceTable} partition (${sourcePartition}) to '${sourceStagingDir}'; -+</blockquote> -+ -+For example, for the 2014-03-28T08:00Z run with the given dataset instances and ${coord:dataInPartitions( -+'processed-logs-1', 'hive-export'), the above Hive script with resolved values would look like: -+<blockquote> -+export table myInputDatabase1/myInputTable1 partition (year='2014',month='03',day='28',hour='08') to 'hdfs://bar:8020/staging/2014-03-28-08'; -+</blockquote> -+ -+*Example Hive Import script:* -+The following script imports a particular Hive table partition from staging location, where the partition value is computed -+ through =${coord:dataInPartitions(String name, String type)}= EL function. 
-+<blockquote> -+use ${targetDatabase}; -+alter table ${targetTable} drop if exists partition ${targetPartition}; -+import table ${targetTable} partition (${targetPartition}) from '${sourceStagingDir}'; -+</blockquote> -+ -+For example, for the 2014-03-28T08:00Z run with the given dataset instances and ${coord:dataInPartitions( -+'processed-logs-2', 'hive-export'), the above Hive script with resolved values would look like: -+ -+<blockquote> -+use myInputDatabase2; -+alter table myInputTable2 drop if exists partition (year='2014',month='03',day='28',hour='08'); -+import table myInputTable2 partition (year='2014',month='03',day='28',hour='08') from 'hdfs://bar:8020/staging/2014-03-28-08'; -+</blockquote> -+ - - ---+++ 6.9. Parameterization of Coordinator Application - -diff --git sharelib/hcatalog/src/main/java/org/apache/oozie/util/HCatURI.java sharelib/hcatalog/src/main/java/org/apache/oozie/util/HCatURI.java -index d797f9b..4bc5048 100644 ---- sharelib/hcatalog/src/main/java/org/apache/oozie/util/HCatURI.java -+++ sharelib/hcatalog/src/main/java/org/apache/oozie/util/HCatURI.java -@@ -260,6 +260,35 @@ public class HCatURI { - return filter.toString(); - } - -+ /** -+ * Get the entire partition value string from partition map. -+ * In case of type hive-export, it can be used to create entire partition value string -+ * that can be used in Hive query for partition export/import. -+ * -+ * type hive-export -+ * @return partition value string -+ */ -+ public String toPartitionValueString(String type) { -+ StringBuilder value = new StringBuilder(); -+ if (type.equals("hive-export")) { -+ String comparator = "="; -+ String separator = ","; -+ for (Map.Entry<String, String> entry : partitions.entrySet()) { -+ if (value.length() > 1) { -+ value.append(separator); -+ } -+ value.append(entry.getKey()); -+ value.append(comparator); -+ value.append(PARTITION_VALUE_QUOTE); -+ value.append(entry.getValue()); -+ value.append(PARTITION_VALUE_QUOTE); -+ } -+ } else { -+ throw new RuntimeException("Unsupported type: " + type); -+ } -+ return value.toString(); -+ } -+ - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/build-tools/src/patches/oozie-hadoop2-profile.patch ---------------------------------------------------------------------- diff --git a/build-tools/src/patches/oozie-hadoop2-profile.patch b/build-tools/src/patches/oozie-hadoop2-profile.patch deleted file mode 100644 index c555f2d..0000000 --- a/build-tools/src/patches/oozie-hadoop2-profile.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/pom.xml b/pom.xml -index 455a11d..ab4299c 100644 ---- a/pom.xml -+++ b/pom.xml -@@ -1785,7 +1785,7 @@ - <profile> - <id>hadoop-2</id> - <activation> -- <activeByDefault>false</activeByDefault> -+ <activeByDefault>true</activeByDefault> - </activation> - <properties> - <hadoop.version>2.4.0</hadoop.version> http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/build-tools/src/patches/oozie-site.patch ---------------------------------------------------------------------- diff --git a/build-tools/src/patches/oozie-site.patch b/build-tools/src/patches/oozie-site.patch deleted file mode 100644 index a368e7f..0000000 --- a/build-tools/src/patches/oozie-site.patch +++ /dev/null @@ -1,165 +0,0 @@ ---- ./core/src/main/conf/oozie-site.xml -+++ ./core/src/main/conf/oozie-site.xml -@@ -376,4 +376,162 @@ - - --> - -+ <property> -+ <name>oozie.base.url</name> -+ <value>https://localhost:41443/oozie</value> -+ <description> 
-+ The Oozie base url. -+ </description> -+ </property> -+ -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-job-submit-instances</name> -+ <value> -+ now=org.apache.oozie.extensions.OozieELExtensions#ph1_now_echo, -+ today=org.apache.oozie.extensions.OozieELExtensions#ph1_today_echo, -+ yesterday=org.apache.oozie.extensions.OozieELExtensions#ph1_yesterday_echo, -+ currentWeek=org.apache.oozie.extensions.OozieELExtensions#ph1_currentWeek_echo, -+ lastWeek=org.apache.oozie.extensions.OozieELExtensions#ph1_lastWeek_echo, -+ currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_currentMonth_echo, -+ lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_lastMonth_echo, -+ currentYear=org.apache.oozie.extensions.OozieELExtensions#ph1_currentYear_echo, -+ lastYear=org.apache.oozie.extensions.OozieELExtensions#ph1_lastYear_echo, -+ formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, -+ latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, -+ future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ This property is a convenience property to add extensions to the built in executors without having to -+ include all the built in ones. -+ </description> -+ </property> -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-action-create-inst</name> -+ <value> -+ now=org.apache.oozie.extensions.OozieELExtensions#ph2_now_inst, -+ today=org.apache.oozie.extensions.OozieELExtensions#ph2_today_inst, -+ yesterday=org.apache.oozie.extensions.OozieELExtensions#ph2_yesterday_inst, -+ currentWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_currentWeek_inst, -+ lastWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_lastWeek_inst, -+ currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_currentMonth_inst, -+ lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_lastMonth_inst, -+ currentYear=org.apache.oozie.extensions.OozieELExtensions#ph2_currentYear_inst, -+ lastYear=org.apache.oozie.extensions.OozieELExtensions#ph2_lastYear_inst, -+ latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, -+ future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, -+ formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ This property is a convenience property to add extensions to the built in executors without having to -+ include all the built in ones. 
-+ </description> -+ </property> -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-action-create</name> -+ <value> -+ now=org.apache.oozie.extensions.OozieELExtensions#ph2_now, -+ today=org.apache.oozie.extensions.OozieELExtensions#ph2_today, -+ yesterday=org.apache.oozie.extensions.OozieELExtensions#ph2_yesterday, -+ currentWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_currentWeek, -+ lastWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_lastWeek, -+ currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_currentMonth, -+ lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_lastMonth, -+ currentYear=org.apache.oozie.extensions.OozieELExtensions#ph2_currentYear, -+ lastYear=org.apache.oozie.extensions.OozieELExtensions#ph2_lastYear, -+ latest=org.apache.oozie.coord.CoordELFunctions#ph2_coord_latest_echo, -+ future=org.apache.oozie.coord.CoordELFunctions#ph2_coord_future_echo, -+ formatTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_formatTime, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ This property is a convenience property to add extensions to the built in executors without having to -+ include all the built in ones. -+ </description> -+ </property> -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-job-submit-data</name> -+ <value> -+ now=org.apache.oozie.extensions.OozieELExtensions#ph1_now_echo, -+ today=org.apache.oozie.extensions.OozieELExtensions#ph1_today_echo, -+ yesterday=org.apache.oozie.extensions.OozieELExtensions#ph1_yesterday_echo, -+ currentWeek=org.apache.oozie.extensions.OozieELExtensions#ph1_currentWeek_echo, -+ lastWeek=org.apache.oozie.extensions.OozieELExtensions#ph1_lastWeek_echo, -+ currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_currentMonth_echo, -+ lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph1_lastMonth_echo, -+ currentYear=org.apache.oozie.extensions.OozieELExtensions#ph1_currentYear_echo, -+ lastYear=org.apache.oozie.extensions.OozieELExtensions#ph1_lastYear_echo, -+ dataIn=org.apache.oozie.extensions.OozieELExtensions#ph1_dataIn_echo, -+ instanceTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_wrap, -+ formatTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_formatTime_echo, -+ dateOffset=org.apache.oozie.coord.CoordELFunctions#ph1_coord_dateOffset_echo, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL constant declarations, separated by commas, format is [PREFIX:]NAME=CLASS#CONSTANT. -+ This property is a convenience property to add extensions to the built in executors without having to -+ include all the built in ones. 
-+ </description> -+ </property> -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-action-start</name> -+ <value> -+ now=org.apache.oozie.extensions.OozieELExtensions#ph2_now, -+ today=org.apache.oozie.extensions.OozieELExtensions#ph2_today, -+ yesterday=org.apache.oozie.extensions.OozieELExtensions#ph2_yesterday, -+ currentWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_currentWeek, -+ lastWeek=org.apache.oozie.extensions.OozieELExtensions#ph2_lastWeek, -+ currentMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_currentMonth, -+ lastMonth=org.apache.oozie.extensions.OozieELExtensions#ph2_lastMonth, -+ currentYear=org.apache.oozie.extensions.OozieELExtensions#ph2_currentYear, -+ lastYear=org.apache.oozie.extensions.OozieELExtensions#ph2_lastYear, -+ latest=org.apache.oozie.coord.CoordELFunctions#ph3_coord_latest, -+ future=org.apache.oozie.coord.CoordELFunctions#ph3_coord_future, -+ dataIn=org.apache.oozie.extensions.OozieELExtensions#ph3_dataIn, -+ instanceTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_nominalTime, -+ dateOffset=org.apache.oozie.coord.CoordELFunctions#ph3_coord_dateOffset, -+ formatTime=org.apache.oozie.coord.CoordELFunctions#ph3_coord_formatTime, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ This property is a convenience property to add extensions to the built in executors without having to -+ include all the built in ones. -+ </description> -+ </property> -+ -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-sla-submit</name> -+ <value> -+ instanceTime=org.apache.oozie.coord.CoordELFunctions#ph1_coord_nominalTime_echo_fixed, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ </description> -+ </property> -+ -+ <property> -+ <name>oozie.service.ELService.ext.functions.coord-sla-create</name> -+ <value> -+ instanceTime=org.apache.oozie.coord.CoordELFunctions#ph2_coord_nominalTime, -+ user=org.apache.oozie.coord.CoordELFunctions#coord_user -+ </value> -+ <description> -+ EL functions declarations, separated by commas, format is [PREFIX:]NAME=CLASS#METHOD. -+ </description> -+ </property> -+ -+ <property> -+ <name>oozie.service.HadoopAccessorService.supported.filesystems</name> -+ <value>hdfs,hftp,webhdfs,jail</value> -+ <description> -+ Enlist the different filesystems supported for federation. If wildcard "*" is specified, -+ then ALL file schemes will be allowed. -+ </description> -+ </property> -+ - </configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/pom.xml ---------------------------------------------------------------------- diff --git a/cli/pom.xml b/cli/pom.xml deleted file mode 100644 index 408c6ee..0000000 --- a/cli/pom.xml +++ /dev/null @@ -1,207 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> - -<!-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --> - -<project xmlns="http://maven.apache.org/POM/4.0.0" - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> - - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-main</artifactId> - <version>0.10-SNAPSHOT</version> - </parent> - <artifactId>falcon-cli</artifactId> - <description>Apache Falcon CLI client</description> - <name>Apache Falcon CLI client</name> - <packaging>jar</packaging> - - <profiles> - <profile> - <id>hadoop-2</id> - <activation> - <activeByDefault>true</activeByDefault> - </activation> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-client</artifactId> - </dependency> - </dependencies> - </profile> - </profiles> - - <dependencies> - <dependency> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-client</artifactId> - </dependency> - - <dependency> - <groupId>commons-net</groupId> - <artifactId>commons-net</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>commons-codec</groupId> - <artifactId>commons-codec</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.commons</groupId> - <artifactId>commons-lang3</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-client</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-core</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-json</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>log4j</groupId> - <artifactId>log4j</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging</artifactId> - </dependency> - - <dependency> - <groupId>commons-io</groupId> - <artifactId>commons-io</artifactId> - <version>2.4</version> - </dependency> - - <dependency> - <groupId>jline</groupId> - <artifactId>jline</artifactId> - </dependency> - - <dependency> - <groupId>com.github.stephenc.findbugs</groupId> - <artifactId>findbugs-annotations</artifactId> - </dependency> - - <dependency> - <groupId>org.testng</groupId> - <artifactId>testng</artifactId> - </dependency> - - <dependency> - <groupId>org.springframework.shell</groupId> - <artifactId>spring-shell</artifactId> - </dependency> - - <dependency> - <groupId>org.springframework</groupId> - <artifactId>spring-beans</artifactId> - <version>4.0.3.RELEASE</version> - </dependency> - - </dependencies> - - <build> - <plugins> - <!-- make the jar executable by adding a Main-Class and Class-Path to the manifest --> - <plugin> - 
<groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-shade-plugin</artifactId> - <version>2.4.1</version> - <configuration> - <shadedArtifactAttached>false</shadedArtifactAttached> - <createDependencyReducedPom>false</createDependencyReducedPom> - <artifactSet> - <includes> - <include>org.apache.hadoop:hadoop-auth</include> - <include>org.apache.falcon:*</include> - <include>org.apache.commons:*</include> - <include>commons-logging:*</include> - <include>commons-net:*</include> - <include>commons-codec:*</include> - <include>commons-io:*</include> - <include>jline:*</include> - <include>org.slf4j:*</include> - <include>log4j:*</include> - <include>com.sun.jersey:*</include> - <include>org.springframework:*</include> - <include>org.springframework.shell:*</include> - </includes> - </artifactSet> - <filters> - <filter> - <artifact>*:*</artifact> - <excludes> - <exclude>META-INF/*.SF</exclude> - <exclude>META-INF/*.DSA</exclude> - <exclude>META-INF/*.RSA</exclude> - </excludes> - </filter> - </filters> - </configuration> - <executions> - <execution> - <phase>package</phase> - <goals> - <goal>shade</goal> - </goals> - <configuration> - <transformers> - <transformer - implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/> - <transformer - implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/> - <transformer - implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer"/> - </transformers> - </configuration> - </execution> - </executions> - </plugin> - </plugins> - </build> -</project> http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/FalconCLIRuntimeException.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/FalconCLIRuntimeException.java b/cli/src/main/java/org/apache/falcon/cli/FalconCLIRuntimeException.java deleted file mode 100644 index b7fa4cd..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/FalconCLIRuntimeException.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli; - -/** - * Runtime exception in CLI. Since most methods are invoked through reflection, checked exceptions - * end up being thrown as UndeclaredThrowableException. Instead of that, let's throw our own RuntimeException. 
- */ -public class FalconCLIRuntimeException extends RuntimeException { - public FalconCLIRuntimeException(Throwable e) { - super(e); - } - - public FalconCLIRuntimeException(String message) { - super(message); - } - - public FalconCLIRuntimeException(String message, Throwable cause) { - super(message, cause); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/commands/BaseFalconCommands.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/commands/BaseFalconCommands.java b/cli/src/main/java/org/apache/falcon/cli/commands/BaseFalconCommands.java deleted file mode 100644 index dbd28fb..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/commands/BaseFalconCommands.java +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli.commands; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.falcon.cli.FalconCLIRuntimeException; -import org.apache.falcon.client.FalconCLIException; -import org.apache.falcon.client.FalconClient; -import org.springframework.shell.core.ExecutionProcessor; -import org.springframework.shell.event.ParseResult; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -/** - * Common code for all falcon command classes. 
- */ -public class BaseFalconCommands implements ExecutionProcessor { - private static final String FALCON_URL = "FALCON_URL"; - private static final String FALCON_URL_PROPERTY = "falcon.url"; - private static final String DO_AS = "DO_AS"; - private static final String DO_AS_PROPERTY = "do.as"; - private static final String CLIENT_PROPERTIES = "/client.properties"; - private static Properties clientProperties; - private static Properties backupProperties = new Properties(); - private static FalconClient client; - - protected static Properties getClientProperties() { - if (clientProperties == null) { - InputStream inputStream = null; - Properties prop = new Properties(System.getProperties()); - prop.putAll(backupProperties); - try { - inputStream = BaseFalconCommands.class.getResourceAsStream(CLIENT_PROPERTIES); - if (inputStream != null) { - try { - prop.load(inputStream); - } catch (IOException e) { - throw new FalconCLIRuntimeException(e); - } - } - } finally { - IOUtils.closeQuietly(inputStream); - } - String urlOverride = System.getenv(FALCON_URL); - if (urlOverride != null) { - prop.setProperty(FALCON_URL_PROPERTY, urlOverride); - } - if (prop.getProperty(FALCON_URL_PROPERTY) == null) { - throw new FalconCLIRuntimeException("Failed to get falcon url from environment or client properties"); - } - String doAsOverride = System.getenv(DO_AS); - if (doAsOverride != null) { - prop.setProperty(DO_AS_PROPERTY, doAsOverride); - } - clientProperties = prop; - backupProperties.clear(); - } - return clientProperties; - } - - static void setClientProperty(String key, String value) { - Properties props; - try { - props = getClientProperties(); - } catch (FalconCLIRuntimeException e) { - props = backupProperties; - } - if (StringUtils.isBlank(value)) { - props.remove(key); - } else { - props.setProperty(key, value); - } - // Re-load client in the next call - client = null; - } - - public static FalconClient getFalconClient() { - if (client == null) { - try { - client = new FalconClient(getClientProperties().getProperty(FALCON_URL_PROPERTY), - getClientProperties()); - } catch (FalconCLIException e) { - throw new FalconCLIRuntimeException(e.getMessage(), e.getCause()); - } - } - return client; - } - - @Override - public ParseResult beforeInvocation(ParseResult parseResult) { - Object[] args = parseResult.getArguments(); - if (args != null) { - boolean allEqual = true; - for (int i = 1; i < args.length; i++) { - allEqual &= args[0].equals(args[i]); - } - if (allEqual) { - if (args[0] instanceof String) { - String[] split = ((String) args[0]).split("\\s+"); - Object[] newArgs = new String[args.length]; - System.arraycopy(split, 0, newArgs, 0, split.length); - parseResult = new ParseResult(parseResult.getMethod(), parseResult.getInstance(), newArgs); - } - } - } - return parseResult; - } - - @Override - public void afterReturningInvocation(ParseResult parseResult, Object o) { - - } - - @Override - public void afterThrowingInvocation(ParseResult parseResult, Throwable throwable) { - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/commands/FalconConnectionCommands.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/commands/FalconConnectionCommands.java b/cli/src/main/java/org/apache/falcon/cli/commands/FalconConnectionCommands.java deleted file mode 100644 index cabe5a8..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/commands/FalconConnectionCommands.java +++ 
/dev/null @@ -1,56 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -package org.apache.falcon.cli.commands; - - -import org.apache.commons.lang3.StringUtils; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; - -import javax.annotation.Nonnull; - -/** - * Connection Commands. - */ -public class FalconConnectionCommands extends BaseFalconCommands { - - @CliCommand(value = "get", help = "get properties") - public String getParameter(@CliOption(key = {"", "key"}, mandatory = false, help = "<key>") final String key) { - if (StringUtils.isBlank(key)) { - return getClientProperties().toString(); - } - return getClientProperties().getProperty(key); - } - - @CliCommand(value = "set", help = "set properties") - public void setParameter(@CliOption(key = {"", "keyval"}, mandatory = true, help = "<key-val>") - @Nonnull final String keyVal) { - String[] kvArray = keyVal.split("="); - String key = ""; - String value = ""; - if (kvArray.length > 0) { - key = kvArray[0]; - } - if (kvArray.length > 1) { - value = kvArray[1]; - } - setClientProperty(key, value); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/commands/FalconEntityCommands.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/commands/FalconEntityCommands.java b/cli/src/main/java/org/apache/falcon/cli/commands/FalconEntityCommands.java deleted file mode 100644 index 6e091ef..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/commands/FalconEntityCommands.java +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli.commands; - -/** - * Entity Commands. 
- */ -public class FalconEntityCommands extends BaseFalconCommands { - -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/commands/FalconInstanceCommands.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/commands/FalconInstanceCommands.java b/cli/src/main/java/org/apache/falcon/cli/commands/FalconInstanceCommands.java deleted file mode 100644 index 8f3a2fc..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/commands/FalconInstanceCommands.java +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli.commands; - -/** - * Instance commands. - */ -public class FalconInstanceCommands extends BaseFalconCommands { - -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/skel/FalconBanner.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/skel/FalconBanner.java b/cli/src/main/java/org/apache/falcon/cli/skel/FalconBanner.java deleted file mode 100644 index 03c56c9..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/skel/FalconBanner.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -package org.apache.falcon.cli.skel; - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultBannerProvider; -import org.springframework.shell.support.util.OsUtils; -import org.springframework.stereotype.Component; - -/** - * The Class FalconBanner. 
- */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class FalconBanner extends DefaultBannerProvider { - - @Override - public String getBanner() { - return new StringBuilder() - .append("=======================================").append(OsUtils.LINE_SEPARATOR) - .append("* *").append(OsUtils.LINE_SEPARATOR) - .append("* Falcon CLI *").append(OsUtils.LINE_SEPARATOR) - .append("* *").append(OsUtils.LINE_SEPARATOR) - .append("=======================================").append(OsUtils.LINE_SEPARATOR) - .toString(); - - } - - @Override - public String getWelcomeMessage() { - return "Welcome to Falcon CLI"; - } - - @Override - public String getVersion() { - return getClass().getPackage().getImplementationVersion(); - } - - @Override - public String getProviderName() { - return "Falcon CLI"; - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/skel/FalconHistoryFileProvider.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/skel/FalconHistoryFileProvider.java b/cli/src/main/java/org/apache/falcon/cli/skel/FalconHistoryFileProvider.java deleted file mode 100644 index 74d003a..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/skel/FalconHistoryFileProvider.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli.skel; - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider; -import org.springframework.stereotype.Component; - -/** - * The Class FalconHistoryFileProvider. - */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class FalconHistoryFileProvider extends DefaultHistoryFileNameProvider { - - public String getHistoryFileName() { - return "falcon-cli-hist.log"; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider#name() - */ - @Override - public String getProviderName() { - return "falcon client history provider"; - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/java/org/apache/falcon/cli/skel/FalconPromptProvider.java ---------------------------------------------------------------------- diff --git a/cli/src/main/java/org/apache/falcon/cli/skel/FalconPromptProvider.java b/cli/src/main/java/org/apache/falcon/cli/skel/FalconPromptProvider.java deleted file mode 100644 index d8ead5b..0000000 --- a/cli/src/main/java/org/apache/falcon/cli/skel/FalconPromptProvider.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.cli.skel; - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultPromptProvider; -import org.springframework.stereotype.Component; - -/** - * The Class FalconPromptProvider. - */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class FalconPromptProvider extends DefaultPromptProvider { - - @Override - public String getPrompt() { - return "falcon-shell>"; - } - - /* - * (non-Javadoc) - * - * @see org.springframework.shell.plugin.support.DefaultPromptProvider#name() - */ - @Override - public String getProviderName() { - return "falcon prompt provider"; - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/cli/src/main/resources/META-INF/spring/spring-shell-plugin.xml ---------------------------------------------------------------------- diff --git a/cli/src/main/resources/META-INF/spring/spring-shell-plugin.xml b/cli/src/main/resources/META-INF/spring/spring-shell-plugin.xml deleted file mode 100644 index bd0fed4..0000000 --- a/cli/src/main/resources/META-INF/spring/spring-shell-plugin.xml +++ /dev/null @@ -1,40 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. 
- ---> -<beans xmlns="http://www.springframework.org/schema/beans" - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation=" - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd"> - - <context:component-scan base-package="org.apache.falcon.cli"/> - - <bean id="falconConnectionCommands" - class="org.apache.falcon.cli.commands.FalconConnectionCommands"> - </bean> - <bean id="falconEntityCommands" - class="org.apache.falcon.cli.commands.FalconEntityCommands"> - </bean> - <bean id="falconInstanceCommands" - class="org.apache.falcon.cli.commands.FalconInstanceCommands"> - </bean> -</beans> http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/pom.xml ---------------------------------------------------------------------- diff --git a/client/pom.xml b/client/pom.xml deleted file mode 100644 index 743b93d..0000000 --- a/client/pom.xml +++ /dev/null @@ -1,161 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> - -<!-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.falcon</groupId>
-        <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
-    </parent>
-    <artifactId>falcon-client</artifactId>
-    <description>Apache Falcon Java client</description>
-    <name>Apache Falcon Java client</name>
-    <packaging>jar</packaging>
-
-    <profiles>
-        <profile>
-            <id>hadoop-2</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-auth</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-client</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
-    <dependencies>
-        <dependency>
-            <groupId>commons-cli</groupId>
-            <artifactId>commons-cli</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-net</groupId>
-            <artifactId>commons-net</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-codec</groupId>
-            <artifactId>commons-codec</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.sun.jersey</groupId>
-            <artifactId>jersey-client</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.sun.jersey</groupId>
-            <artifactId>jersey-json</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.github.stephenc.findbugs</groupId>
-            <artifactId>findbugs-annotations</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hive.hcatalog</groupId>
-            <artifactId>hive-webhcat-java-client</artifactId>
-            <version>${hive.version}</version>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <artifactId>maven-dependency-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>copy-dependencies</goal>
-                        </goals>
-                        <configuration>
-                            <outputDirectory>${project.build.directory}/dependency</outputDirectory>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.jvnet.jaxb2.maven2</groupId>
-                <artifactId>maven-jaxb2-plugin</artifactId>
-                <version>0.8.0</version>
-                <executions>
-                    <execution>
-                        <id>1</id>
-                        <phase>generate-sources</phase>
-                        <goals>
-                            <goal>generate</goal>
-                        </goals>
-                    </execution>
-                </executions>
-                <configuration>
-                    <args>
-                        <arg>-extension</arg>
-                        <arg>-Xinheritance</arg>
-                    </args>
-                    <plugins>
-                        <plugin>
-                            <groupId>org.jvnet.jaxb2_commons</groupId>
-                            <artifactId>jaxb2-basics</artifactId>
-                            <version>0.6.3</version>
-                        </plugin>
-                    </plugins>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/LifeCycle.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/LifeCycle.java b/client/src/main/java/org/apache/falcon/LifeCycle.java
deleted file mode 100644
index 0ecddd1..0000000
--- a/client/src/main/java/org/apache/falcon/LifeCycle.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon;
-
-/**
- * Represents life cycle of an entity.
- */
-
-public enum LifeCycle {
-    EXECUTION(Tag.DEFAULT),
-    EVICTION(Tag.RETENTION),
-    REPLICATION(Tag.REPLICATION),
-    IMPORT(Tag.IMPORT),
-    EXPORT(Tag.EXPORT);
-
-    private final Tag tag;
-
-    LifeCycle(Tag tag) {
-        this.tag = tag;
-    }
-
-    public Tag getTag() {
-        return this.tag;
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/Pair.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/Pair.java b/client/src/main/java/org/apache/falcon/Pair.java
deleted file mode 100644
index d4cea90..0000000
--- a/client/src/main/java/org/apache/falcon/Pair.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon;
-
-import java.io.Serializable;
-
-/**
- * Simple pair class to hold a pair of object of specific class.
- * @param <A> - First element in pair.
- * @param <B> - Second element in pair
- */
-public class Pair<A, B> implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    //SUSPEND CHECKSTYLE CHECK VisibilityModifierCheck
-    public final A first;
-    public final B second;
-    //RESUME CHECKSTYLE CHECK VisibilityModifierCheck
-
-    public Pair(A fst, B snd) {
-        this.first = fst;
-        this.second = snd;
-    }
-
-    public static <A, B> Pair<A, B> of(A a, B b) {
-        return new Pair<A, B>(a, b);
-    }
-
-    @Override
-    public String toString() {
-        return "(" + first + "," + second + ")";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
-
-        Pair pair = (Pair) o;
-
-        if (first != null ? !first.equals(pair.first) : pair.first != null) {
-            return false;
-        }
-        if (second != null ? !second.equals(pair.second) : pair.second != null) {
-            return false;
-        }
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = first != null ? first.hashCode() : 0;
-        result = 31 * result + (second != null ? second.hashCode() : 0);
-        return result;
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/ResponseHelper.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/ResponseHelper.java b/client/src/main/java/org/apache/falcon/ResponseHelper.java
deleted file mode 100644
index 8f22af7..0000000
--- a/client/src/main/java/org/apache/falcon/ResponseHelper.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.entity.v0.SchemaHelper;
-import org.apache.falcon.resource.EntitySummaryResult;
-import org.apache.falcon.resource.FeedInstanceResult;
-import org.apache.falcon.resource.FeedLookupResult;
-import org.apache.falcon.resource.InstanceDependencyResult;
-import org.apache.falcon.resource.InstancesResult;
-import org.apache.falcon.resource.InstancesSummaryResult;
-import org.apache.falcon.resource.SchedulableEntityInstanceResult;
-import org.apache.falcon.resource.TriageResult;
-
-import java.util.Date;
-import java.util.Formatter;
-import java.util.Map;
-
-/**
- * Helpers for response object to string conversion.
- */
-
-public final class ResponseHelper {
-
-    private ResponseHelper() { }
-
-    public static String getString(EntitySummaryResult result) {
-        StringBuilder sb = new StringBuilder();
-        String toAppend;
-        sb.append("Consolidated Status: ").append(result.getStatus())
-            .append("\n");
-        sb.append("\nEntity Summary Result :\n");
-        if (result.getEntitySummaries() != null) {
-            for (EntitySummaryResult.EntitySummary entitySummary : result.getEntitySummaries()) {
-                toAppend = entitySummary.toString();
-                sb.append(toAppend).append("\n");
-            }
-        }
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(result.getMessage());
-        sb.append("Request Id: ").append(result.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(InstancesResult result, String runid) {
-        StringBuilder sb = new StringBuilder();
-        String toAppend;
-
-        sb.append("Consolidated Status: ").append(result.getStatus())
-            .append("\n");
-
-        sb.append("\nInstances:\n");
-        sb.append("Instance\t\tCluster\t\tSourceCluster\t\tStatus\t\tRunID\t\t\tLog\n");
-        sb.append("-----------------------------------------------------------------------------------------------\n");
-        if (result.getInstances() != null) {
-            for (InstancesResult.Instance instance : result.getInstances()) {
-
-                toAppend =
-                    (instance.getInstance() != null) ? instance.getInstance()
-                        : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getCluster() != null ? instance.getCluster() : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getSourceCluster() != null ? instance
-                        .getSourceCluster() : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    (instance.getStatus() != null ? instance.getStatus()
-                        .toString() : "-");
-                sb.append(toAppend).append("\t");
-
-                toAppend = (runid != null ? runid : "latest");
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getLogFile() != null ? instance.getLogFile() : "-";
-                sb.append(toAppend).append("\n");
-
-                if (instance.actions != null) {
-                    sb.append("actions:\n");
-                    for (InstancesResult.InstanceAction action : instance.actions) {
-                        sb.append("  ").append(action.getAction())
-                            .append("\t");
-                        sb.append(action.getStatus()).append("\t")
-                            .append(action.getLogFile()).append("\n");
-                    }
-                }
-            }
-        }
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(result.getMessage());
-        sb.append("Request Id: ").append(result.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(FeedInstanceResult result) {
-        StringBuilder sb = new StringBuilder();
-        String toAppend;
-
-        sb.append("Consolidated Status: ").append(result.getStatus())
-            .append("\n");
-
-        sb.append("\nInstances:\n");
-        Formatter formatter = new Formatter(sb);
-        formatter.format("%-16s%-20s%-16s%-16s%-20s%-16s", "Cluster", "Instance", "Status", "Size", "CreationTime",
-                "Details");
-        sb.append("\n");
-        sb.append("-----------------------------------------------------------------------------------------------\n");
-        if (result.getInstances() != null) {
-            for (FeedInstanceResult.Instance instance : result.getInstances()) {
-                formatter.format("%-16s", instance.getCluster() != null ? instance.getCluster() : "-");
-                formatter.format("%-20s", instance.getInstance() != null ? instance.getInstance() : "-");
-                formatter.format("%-16s", instance.getStatus() != null ? instance.getStatus() : "-");
-                formatter.format("%-16s", instance.getSize() != -1 ? instance.getSizeH() : "-");
-                formatter.format("%-20s", instance.getCreationTime() != 0
-                        ? SchemaHelper.formatDateUTC(new Date(instance.getCreationTime())) : "-");
-                formatter.format("%-16s", StringUtils.isEmpty(instance.getUri()) ? "-" : instance.getUri());
-                sb.append("\n");
-            }
-        }
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(result.getMessage());
-        sb.append("Request Id: ").append(result.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(InstancesResult result) {
-        StringBuilder sb = new StringBuilder();
-        String toAppend;
-
-        sb.append("Consolidated Status: ").append(result.getStatus())
-            .append("\n");
-
-        sb.append("\nInstances:\n");
-        sb.append("Instance\t\tCluster\t\tSourceCluster\t\tStatus\t\tStart\t\tEnd\t\tDetails\t\t\t\t\tLog\n");
-        sb.append("-----------------------------------------------------------------------------------------------\n");
-        if (result.getInstances() != null) {
-            for (InstancesResult.Instance instance : result.getInstances()) {
-
-                toAppend =
-                    instance.getInstance() != null ? instance.getInstance()
-                        : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getCluster() != null ? instance.getCluster() : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getSourceCluster() != null ? instance
-                        .getSourceCluster() : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    (instance.getStatus() != null ? instance.getStatus()
-                        .toString() : "-");
-                sb.append(toAppend).append("\t");
-
-                toAppend = instance.getStartTime() != null
-                    ? SchemaHelper.formatDateUTC(instance.getStartTime()) : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend = instance.getEndTime() != null
-                    ? SchemaHelper.formatDateUTC(instance.getEndTime()) : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend = (!StringUtils.isEmpty(instance.getDetails()))
-                    ? instance.getDetails() : "-";
-                sb.append(toAppend).append("\t");
-
-                toAppend =
-                    instance.getLogFile() != null ? instance.getLogFile() : "-";
-                sb.append(toAppend).append("\n");
-
-                if (instance.getWfParams() != null) {
-                    InstancesResult.KeyValuePair[] props = instance.getWfParams();
-                    sb.append("Workflow params").append("\n");
-                    for (InstancesResult.KeyValuePair entry : props) {
-                        sb.append(entry.getKey()).append("=")
-                            .append(entry.getValue()).append("\n");
-                    }
-                    sb.append("\n");
-                }
-
-                if (instance.actions != null) {
-                    sb.append("actions:\n");
-                    for (InstancesResult.InstanceAction action : instance.actions) {
-                        sb.append("  ").append(action.getAction()).append("\t");
-                        sb.append(action.getStatus()).append("\t")
-                            .append(action.getLogFile()).append("\n");
-                    }
-                }
-            }
-        }
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(result.getMessage());
-        sb.append("Request Id: ").append(result.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(InstancesSummaryResult result) {
-        StringBuilder sb = new StringBuilder();
-        String toAppend;
-
-        sb.append("Consolidated Status: ").append(result.getStatus())
-            .append("\n");
-        sb.append("\nInstances Summary:\n");
-
-        if (result.getInstancesSummary() != null) {
-            for (InstancesSummaryResult.InstanceSummary summary : result
-                .getInstancesSummary()) {
-                toAppend =
-                    summary.getCluster() != null ? summary.getCluster() : "-";
-                sb.append("Cluster: ").append(toAppend).append("\n");
-
-                sb.append("Status\t\tCount\n");
-                sb.append("-------------------------\n");
-
-                for (Map.Entry<String, Long> entry : summary.getSummaryMap()
-                    .entrySet()) {
-                    sb.append(entry.getKey()).append("\t\t")
-                        .append(entry.getValue()).append("\n");
-                }
-            }
-        }
-
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(result.getMessage());
-        sb.append("Request Id: ").append(result.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(TriageResult triageResult) {
-        StringBuilder sb = new StringBuilder();
-
-        sb.append(triageResult.toString());
-        sb.append("\nAdditional Information:\n");
-        sb.append("Response: ").append(triageResult.getMessage());
-        sb.append("Request Id: ").append(triageResult.getRequestId());
-
-        return sb.toString();
-    }
-
-    public static String getString(FeedLookupResult feedLookupResult) {
-        StringBuilder sb = new StringBuilder();
-        String results = feedLookupResult.toString();
-        if (StringUtils.isEmpty(results)) {
-            sb.append("No matching feeds found!");
-        } else {
-            sb.append(results);
-        }
-        sb.append("\n\nResponse: ").append(feedLookupResult.getMessage());
-        sb.append("\nRequest Id: ").append(feedLookupResult.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(InstanceDependencyResult dependencyResult) {
-        StringBuilder sb = new StringBuilder();
-        String results = dependencyResult.toString();
-        if (StringUtils.isEmpty(results)) {
-            sb.append("No dependencies found!");
-        } else {
-            sb.append(results);
-        }
-        sb.append("\n\nResponse: ").append(dependencyResult.getMessage());
-        sb.append("\nRequest Id: ").append(dependencyResult.getRequestId());
-        return sb.toString();
-    }
-
-    public static String getString(SchedulableEntityInstanceResult instances) {
-        StringBuilder sb = new StringBuilder();
-        String results = instances.toString();
-        if (StringUtils.isEmpty(results)) {
-            sb.append("No sla miss found!");
-        } else {
-            sb.append(results);
-        }
-        sb.append("\n\nResponse: ").append(instances.getMessage());
-        sb.append("\nRequest Id: ").append(instances.getRequestId());
-        return sb.toString();
-    }
-}
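
For context on how the removed client classes fit together, the following is a minimal, hypothetical usage sketch; it is not part of this commit and relies only on the Pair.of(...), LifeCycle.getTag(), and ResponseHelper.getString(...) signatures visible in the deleted sources above. The package name org.apache.falcon.examples, the class ClientOutputExample, and the choice to print to System.out are illustrative assumptions.

    // Hypothetical example; only calls shown in the deleted sources are used.
    package org.apache.falcon.examples;

    import org.apache.falcon.LifeCycle;
    import org.apache.falcon.Pair;
    import org.apache.falcon.ResponseHelper;
    import org.apache.falcon.Tag;
    import org.apache.falcon.resource.InstancesResult;

    public final class ClientOutputExample {

        private ClientOutputExample() { }

        // Pair.of and LifeCycle.getTag() as declared in the removed Pair/LifeCycle classes:
        // a simple (entity name, lifecycle tag) key.
        public static Pair<String, Tag> lifecycleKey(String entityName) {
            return Pair.of(entityName, LifeCycle.EXECUTION.getTag());
        }

        // ResponseHelper.getString(InstancesResult) renders a REST result object as the
        // tabular text the CLI prints; the InstancesResult is assumed to come from an
        // existing Falcon API call made elsewhere.
        public static void printInstances(InstancesResult result) {
            System.out.println(ResponseHelper.getString(result));
        }
    }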
