http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/entity/v0/Entity.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/entity/v0/Entity.java b/client/src/main/java/org/apache/falcon/entity/v0/Entity.java deleted file mode 100644 index ba6f2e5..0000000 --- a/client/src/main/java/org/apache/falcon/entity/v0/Entity.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.entity.v0; - -import javax.xml.bind.Marshaller; -import javax.xml.bind.Unmarshaller; -import java.io.StringReader; -import java.io.StringWriter; - -/** - * Base class that all entity jaxb object will extend. 
- */ -public abstract class Entity { - public abstract String getName(); - - public abstract String getTags(); - - public abstract AccessControlList getACL(); - - public EntityType getEntityType() { - for (EntityType type : EntityType.values()) { - if (type.getEntityClass().equals(getClass())) { - return type; - } - } - return null; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || !o.getClass().equals(this.getClass())) { - return false; - } - - Entity entity = (Entity) o; - - String name = getName(); - return !(name != null ? !name.equals(entity.getName()) : entity.getName() != null); - } - - @Override - public int hashCode() { - String clazz = this.getClass().getName(); - - String name = getName(); - int result = name != null ? name.hashCode() : 0; - result = 31 * result + clazz.hashCode(); - return result; - } - - @Override - public String toString() { - try { - StringWriter stringWriter = new StringWriter(); - Marshaller marshaller = getEntityType().getMarshaller(); - marshaller.marshal(this, stringWriter); - return stringWriter.toString(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public static Entity fromString(EntityType type, String str) { - try { - Unmarshaller unmarshaler = type.getUnmarshaller(); - return (Entity) unmarshaler.unmarshal(new StringReader(str)); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public String toShortString() { - return "(" + getEntityType().name().toLowerCase() + ") " + getName(); - } - - public Entity copy() { - return fromString(getEntityType(), toString()); - } -}
http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/entity/v0/EntityNotification.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/entity/v0/EntityNotification.java b/client/src/main/java/org/apache/falcon/entity/v0/EntityNotification.java deleted file mode 100644 index bab70d4..0000000 --- a/client/src/main/java/org/apache/falcon/entity/v0/EntityNotification.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.entity.v0; - -/** - * EntityNotification class to be extended by Feed/Process notification class. 
/**
 * EntityNotification class to be extended by Feed/Process notification classes.
 */
public abstract class EntityNotification {

    /** @return the notification type, e.g. "email" */
    public abstract String getType();

    /** @return the notification level */
    public abstract String getLevel();

    /** @return the notification recipient(s) */
    public abstract String getTo();

    // NOTE(review): getLevel() is deliberately absent from the string form in
    // the original — confirm that omission is intended.
    @Override
    public String toString() {
        return "Notification{"
                + "type=" + getType()
                + ", to=" + getTo()
                + "}";
    }
}
- */ - -package org.apache.falcon.entity.v0; - -import org.apache.falcon.entity.v0.cluster.Cluster; -import org.apache.falcon.entity.v0.feed.Feed; -import org.apache.falcon.entity.v0.process.Process; -import org.apache.falcon.entity.v0.datasource.Datasource; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.ValidationEvent; -import javax.xml.bind.ValidationEventHandler; -import javax.xml.bind.Marshaller; -import javax.xml.bind.Unmarshaller; -import javax.xml.validation.Schema; -import javax.xml.validation.SchemaFactory; -import java.util.Arrays; - -/** - * Enum for types of entities in Falcon Process, Feed and Cluster. - */ -public enum EntityType { - FEED(Feed.class, "/feed-0.1.xsd", "name"), - PROCESS(Process.class, "/process-0.1.xsd", "name"), - CLUSTER(Cluster.class, "/cluster-0.1.xsd", "name"), - DATASOURCE(Datasource.class, "/datasource-0.1.xsd", "name"); - - //Fail unmarshalling of whole xml if unmarshalling of any element fails - private static class EventHandler implements ValidationEventHandler { - @Override - public boolean handleEvent(ValidationEvent event) { - return false; - } - } - - private static final String NS = "http://www.w3.org/2001/XMLSchema"; - - private final Class<? extends Entity> clazz; - private JAXBContext jaxbContext; - private Schema schema; - private String[] immutableProperties; - - private String schemaFile; - - private EntityType(Class<? extends Entity> typeClass, String schemaFile, String... immutableProperties) { - clazz = typeClass; - this.immutableProperties = immutableProperties; - this.schemaFile = schemaFile; - try { - jaxbContext = JAXBContext.newInstance(typeClass); - synchronized (this) { - SchemaFactory schemaFactory = SchemaFactory.newInstance(NS); - schema = schemaFactory.newSchema(getClass().getResource(schemaFile)); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public Class<? 
extends Entity> getEntityClass() { - return clazz; - } - - public String getSchemaFile() { - return schemaFile; - } - - public Marshaller getMarshaller() throws JAXBException { - Marshaller marshaller = jaxbContext.createMarshaller(); - marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); - return marshaller; - } - - public Unmarshaller getUnmarshaller() throws JAXBException { - Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); - unmarshaller.setSchema(schema); - unmarshaller.setEventHandler(new EventHandler()); - return unmarshaller; - } - - - public boolean isSchedulable() { - // Cluster and Datasource are not schedulable like Feed and Process - return ((this != EntityType.CLUSTER) && (this != EntityType.DATASOURCE)); - } - - @edu.umd.cs.findbugs.annotations.SuppressWarnings({"EI_EXPOSE_REP"}) - public String[] getImmutableProperties() { - return immutableProperties; - } - - public static EntityType getEnum(String type) { - try { - return EntityType.valueOf(type.toUpperCase().trim()); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("Invalid entity type: " + type + ". Expected " - + Arrays.toString(values()).toLowerCase() + "."); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/entity/v0/Frequency.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/entity/v0/Frequency.java b/client/src/main/java/org/apache/falcon/entity/v0/Frequency.java deleted file mode 100644 index f423df6..0000000 --- a/client/src/main/java/org/apache/falcon/entity/v0/Frequency.java +++ /dev/null @@ -1,113 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
import java.util.Calendar;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Frequency as supported in the xsd definitions, e.g. "hours(6)".
 */
public class Frequency {
    private static final Pattern PATTERN = Pattern.compile("(minutes|hours|days|months)\\((\\d+)\\)");

    /**
     * TimeUnit corresponding to the frequency.
     * Constant names are lowercase on purpose: they are resolved via
     * valueOf() directly from the parsed string.
     */
    public static enum TimeUnit {
        minutes(Calendar.MINUTE), hours(Calendar.HOUR), days(Calendar.DATE), months(Calendar.MONTH);

        private int calendarUnit;

        private TimeUnit(int calendarUnit) {
            this.calendarUnit = calendarUnit;
        }

        /** @return the matching java.util.Calendar field constant */
        public int getCalendarUnit() {
            return calendarUnit;
        }
    }

    private TimeUnit timeUnit;
    private String frequency;

    /**
     * Builds a frequency from an already-split value and unit.
     *
     * @param freq     the numeric part, kept as a string
     * @param timeUnit the unit part
     */
    public Frequency(String freq, TimeUnit timeUnit) {
        this.frequency = freq;
        this.timeUnit = timeUnit;
    }

    /**
     * Parses the "unit(n)" textual form.
     *
     * @throws IllegalArgumentException if the text does not match the pattern
     */
    public Frequency(String strValue) {
        Matcher parsed = PATTERN.matcher(strValue);
        if (!parsed.matches()) {
            throw new IllegalArgumentException("Invalid frequency: " + strValue);
        }
        this.timeUnit = TimeUnit.valueOf(parsed.group(1));
        this.frequency = parsed.group(2);
    }

    public static Frequency fromString(String strValue) {
        return new Frequency(strValue);
    }

    /** Null-safe textual form, mirroring {@link #toString()}. */
    public static String toString(Frequency freq) {
        if (freq == null) {
            return null;
        }
        return freq.toString();
    }

    @Override
    public String toString() {
        return timeUnit.name() + "(" + frequency + ")";
    }

    public TimeUnit getTimeUnit() {
        return timeUnit;
    }

    public String getFrequency() {
        return frequency;
    }

    public int getFrequencyAsInt() {
        return Integer.parseInt(frequency);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof Frequency)) {
            return false;
        }
        Frequency other = (Frequency) obj;
        // Note: the numeric part is compared as a string, so "1" != "01".
        return getFrequency().equals(other.getFrequency())
                && getTimeUnit() == other.getTimeUnit();
    }

    @Override
    public int hashCode() {
        return 31 * timeUnit.hashCode() + frequency.hashCode();
    }
}
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.entity.v0; - -import java.text.DateFormat; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.TimeZone; - -/** - * Support function to parse and format date in xsd string. - */ -public final class SchemaHelper { - - public static final String ISO8601_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'"; - - private SchemaHelper() {} - - public static String getTimeZoneId(TimeZone tz) { - return tz.getID(); - } - - public static DateFormat getDateFormat() { - DateFormat dateFormat = new SimpleDateFormat(ISO8601_FORMAT); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - return dateFormat; - } - - public static String formatDateUTC(Date date) { - return (date != null) ? getDateFormat().format(date) : null; - } - - public static Date parseDateUTC(String dateStr) { - if (!DateValidator.validate(dateStr)) { - throw new IllegalArgumentException(dateStr + " is not a valid UTC string"); - } - try { - return getDateFormat().parse(dateStr); - } catch (ParseException e) { - throw new RuntimeException("Unable to parse date: " + dateStr, e); - } - } - - public static String formatDateUTCToISO8601(final String dateString, final String dateStringFormat) { - - try { - DateFormat dateFormat = new SimpleDateFormat(dateStringFormat.substring(0, dateString.length())); - dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); - return SchemaHelper.formatDateUTC(dateFormat.parse(dateString)); - } catch (ParseException e) { - throw new RuntimeException(e); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/metadata/RelationshipType.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/metadata/RelationshipType.java 
b/client/src/main/java/org/apache/falcon/metadata/RelationshipType.java deleted file mode 100644 index 6624319..0000000 --- a/client/src/main/java/org/apache/falcon/metadata/RelationshipType.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.metadata; - -/** - * Enumerates Relationship types. 
/**
 * Enumerates relationship (vertex) types used in the metadata graph.
 */
public enum RelationshipType {

    // entity vertex types
    CLUSTER_ENTITY("cluster-entity"),
    FEED_ENTITY("feed-entity"),
    PROCESS_ENTITY("process-entity"),
    DATASOURCE_ENTITY("datasource-entity"),

    // instance vertex types
    FEED_INSTANCE("feed-instance"),
    PROCESS_INSTANCE("process-instance"),
    IMPORT_INSTANCE("import-instance"),

    // Misc vertex types
    USER("user"),
    COLO("data-center"),
    TAGS("classification"),
    GROUPS("group"),
    PIPELINES("pipelines"),
    REPLICATION_METRICS("replication-metrics");

    private final String name;

    RelationshipType(String name) {
        this.name = name;
    }

    /** @return the external (graph) name of this vertex type */
    public String getName() {
        return name;
    }

    /**
     * Looks up a type by its external name.
     *
     * @throws IllegalArgumentException if no constant carries the given name
     */
    public static RelationshipType fromString(String value) {
        if (value != null) {
            for (RelationshipType candidate : values()) {
                if (value.equals(candidate.getName())) {
                    return candidate;
                }
            }
        }
        throw new IllegalArgumentException("No constant with value " + value + " found");
    }
}
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import org.apache.commons.lang3.StringUtils; - -import java.util.Properties; -import java.io.File; - -/** - * Hdfs Replication recipe tool for Falcon recipes. - */ -public class HdfsReplicationRecipeTool implements Recipe { - - private static final String COMMA_SEPARATOR = ","; - - @Override - public void validate(final Properties recipeProperties) { - for (HdfsReplicationRecipeToolOptions option : HdfsReplicationRecipeToolOptions.values()) { - if (recipeProperties.getProperty(option.getName()) == null && option.isRequired()) { - throw new IllegalArgumentException("Missing argument: " + option.getName()); - } - } - } - - @Override - public Properties getAdditionalSystemProperties(final Properties recipeProperties) { - Properties additionalProperties = new Properties(); - - // Construct fully qualified hdfs src path - String srcPaths = recipeProperties.getProperty(HdfsReplicationRecipeToolOptions - .REPLICATION_SOURCE_DIR.getName()); - StringBuilder absoluteSrcPaths = new StringBuilder(); - String srcFsPath = recipeProperties.getProperty( - HdfsReplicationRecipeToolOptions.REPLICATION_SOURCE_CLUSTER_FS_WRITE_ENDPOINT.getName()); - if (StringUtils.isNotEmpty(srcFsPath)) { - srcFsPath = StringUtils.removeEnd(srcFsPath, File.separator); - } - if (StringUtils.isNotEmpty(srcPaths)) { - String[] paths = srcPaths.split(COMMA_SEPARATOR); - - for (String path : paths) { - StringBuilder srcpath = new StringBuilder(srcFsPath); - srcpath.append(path.trim()); - srcpath.append(COMMA_SEPARATOR); - 
absoluteSrcPaths.append(srcpath); - } - } - - additionalProperties.put(HdfsReplicationRecipeToolOptions.REPLICATION_SOURCE_DIR.getName(), - StringUtils.removeEnd(absoluteSrcPaths.toString(), COMMA_SEPARATOR)); - return additionalProperties; - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java b/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java deleted file mode 100644 index 4c3b543..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -/** - * Hdfs Recipe tool options. 
/**
 * Hdfs Recipe tool options.
 * Every option here uses the two-argument constructor and is therefore required.
 */
public enum HdfsReplicationRecipeToolOptions {
    REPLICATION_SOURCE_DIR("drSourceDir", "Location of source data to replicate"),
    REPLICATION_SOURCE_CLUSTER_FS_WRITE_ENDPOINT("drSourceClusterFS", "Source replication cluster end point"),
    REPLICATION_TARGET_DIR("drTargetDir", "Location on target cluster for replication"),
    REPLICATION_TARGET_CLUSTER_FS_WRITE_ENDPOINT("drTargetClusterFS", "Target replication cluster end point"),
    REPLICATION_MAX_MAPS("distcpMaxMaps", "Maximum number of maps used during replication"),
    REPLICATION_MAP_BANDWIDTH_IN_MB("distcpMapBandwidth", "Bandwidth in MB/s used by each mapper during replication");

    private final String name;
    private final String description;
    private final boolean required;

    HdfsReplicationRecipeToolOptions(String name, String description) {
        // Options default to required unless explicitly flagged otherwise.
        this(name, description, true);
    }

    HdfsReplicationRecipeToolOptions(String name, String description, boolean isRequired) {
        this.name = name;
        this.description = description;
        this.required = isRequired;
    }

    /** @return the property-file key for this option */
    public String getName() {
        return name;
    }

    /** @return a human-readable description of the option */
    public String getDescription() {
        return description;
    }

    /** @return whether the option must be present in the property file */
    public boolean isRequired() {
        return required;
    }

    @Override
    public String toString() {
        return getName();
    }
}
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hive.hcatalog.api.HCatClient; -import org.apache.hive.hcatalog.api.HCatDatabase; -import org.apache.hive.hcatalog.api.HCatTable; -import org.apache.hive.hcatalog.api.ObjectNotFoundException; -import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; -import org.apache.hive.hcatalog.common.HCatException; - -import java.io.IOException; -import java.util.Properties; - -/** - * Hive Replication recipe tool for Falcon recipes. 
- */ -public class HiveReplicationRecipeTool implements Recipe { - private static final String ALL_TABLES = "*"; - - @Override - public void validate(final Properties recipeProperties) throws Exception { - for (HiveReplicationRecipeToolOptions option : HiveReplicationRecipeToolOptions.values()) { - if (recipeProperties.getProperty(option.getName()) == null && option.isRequired()) { - throw new IllegalArgumentException("Missing argument: " + option.getName()); - } - } - - HCatClient sourceMetastoreClient = null; - HCatClient targetMetastoreClient = null; - try { - // Validate if DB exists - source and target - sourceMetastoreClient = getHiveMetaStoreClient( - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_SOURCE_METASTORE_URI.getName()), - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_SOURCE_HIVE_METASTORE_KERBEROS_PRINCIPAL.getName()), - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_SOURCE_HIVE2_KERBEROS_PRINCIPAL.getName())); - - String sourceDbList = recipeProperties.getProperty( - HiveReplicationRecipeToolOptions.REPLICATION_SOURCE_DATABASE.getName()); - - if (StringUtils.isEmpty(sourceDbList)) { - throw new Exception("No source DB specified in property file"); - } - - String sourceTableList = recipeProperties.getProperty( - HiveReplicationRecipeToolOptions.REPLICATION_SOURCE_TABLE.getName()); - if (StringUtils.isEmpty(sourceTableList)) { - throw new Exception("No source table specified in property file. 
For DB replication please specify * " - + "for sourceTable"); - } - - String[] srcDbs = sourceDbList.split(","); - if (srcDbs.length <= 0) { - throw new Exception("No source DB specified in property file"); - } - for (String db : srcDbs) { - if (!dbExists(sourceMetastoreClient, db)) { - throw new Exception("Database " + db + " doesn't exist on source cluster"); - } - } - - if (!sourceTableList.equals(ALL_TABLES)) { - String[] srcTables = sourceTableList.split(","); - if (srcTables.length > 0) { - for (String table : srcTables) { - if (!tableExists(sourceMetastoreClient, srcDbs[0], table)) { - throw new Exception("Table " + table + " doesn't exist on source cluster"); - } - } - } - } - - targetMetastoreClient = getHiveMetaStoreClient( - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_TARGET_METASTORE_URI.getName()), - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_TARGET_HIVE_METASTORE_KERBEROS_PRINCIPAL.getName()), - recipeProperties.getProperty(HiveReplicationRecipeToolOptions - .REPLICATION_TARGET_HIVE2_KERBEROS_PRINCIPAL.getName())); - // Verify db exists on target - for (String db : srcDbs) { - if (!dbExists(targetMetastoreClient, db)) { - throw new Exception("Database " + db + " doesn't exist on target cluster"); - } - } - } finally { - if (sourceMetastoreClient != null) { - sourceMetastoreClient.close(); - } - if (targetMetastoreClient != null) { - targetMetastoreClient.close(); - } - } - } - - @Override - public Properties getAdditionalSystemProperties(final Properties recipeProperties) { - Properties additionalProperties = new Properties(); - String recipeName = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName()); - // Add recipe name as Hive DR job - additionalProperties.put(HiveReplicationRecipeToolOptions.HIVE_DR_JOB_NAME.getName(), recipeName); - additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_RUN.getName(), - 
recipeProperties.getProperty(RecipeToolOptions.CLUSTER_NAME.getName())); - additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_RUN_WRITE_EP.getName(), - recipeProperties.getProperty(RecipeToolOptions.CLUSTER_HDFS_WRITE_ENDPOINT.getName())); - if (StringUtils.isNotEmpty(recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName()))) { - additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_NN_KERBEROS_PRINCIPAL.getName(), - recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName())); - } - return additionalProperties; - } - - private HCatClient getHiveMetaStoreClient(String metastoreUrl, String metastorePrincipal, - String hive2Principal) throws Exception { - try { - HiveConf hcatConf = createHiveConf(new Configuration(false), metastoreUrl, - metastorePrincipal, hive2Principal); - return HCatClient.create(hcatConf); - } catch (IOException e) { - throw new Exception("Exception creating HCatClient: " + e.getMessage(), e); - } - } - - private static HiveConf createHiveConf(Configuration conf, String metastoreUrl, String metastorePrincipal, - String hive2Principal) throws IOException { - HiveConf hcatConf = new HiveConf(conf, HiveConf.class); - - hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUrl); - hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); - hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, - HCatSemanticAnalyzer.class.getName()); - hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); - - hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); - hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - if (StringUtils.isNotEmpty(metastorePrincipal)) { - hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, metastorePrincipal); - hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true"); - hcatConf.set(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname, "true"); - } - if 
(StringUtils.isNotEmpty(hive2Principal)) { - hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, hive2Principal); - hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, "kerberos"); - } - - return hcatConf; - } - - private static boolean tableExists(HCatClient client, final String database, final String tableName) - throws Exception { - try { - HCatTable table = client.getTable(database, tableName); - return table != null; - } catch (ObjectNotFoundException e) { - System.out.println(e.getMessage()); - return false; - } catch (HCatException e) { - throw new Exception("Exception checking if the table exists:" + e.getMessage(), e); - } - } - - private static boolean dbExists(HCatClient client, final String database) - throws Exception { - try { - HCatDatabase db = client.getDatabase(database); - return db != null; - } catch (ObjectNotFoundException e) { - System.out.println(e.getMessage()); - return false; - } catch (HCatException e) { - throw new Exception("Exception checking if the db exists:" + e.getMessage(), e); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java b/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java deleted file mode 100644 index ec0465d..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -/** - * Hive Recipe tool options. - */ -public enum HiveReplicationRecipeToolOptions { - REPLICATION_SOURCE_CLUSTER("sourceCluster", "Replication source cluster name"), - REPLICATION_SOURCE_METASTORE_URI("sourceMetastoreUri", "Source Hive metastore uri"), - REPLICATION_SOURCE_HS2_URI("sourceHiveServer2Uri", "Source HS2 uri"), - REPLICATION_SOURCE_DATABASE("sourceDatabase", "List of databases to replicate"), - REPLICATION_SOURCE_TABLE("sourceTable", "List of tables to replicate"), - REPLICATION_SOURCE_STAGING_PATH("sourceStagingPath", "Location of source staging path"), - REPLICATION_SOURCE_NN("sourceNN", "Source name node"), - REPLICATION_SOURCE_NN_KERBEROS_PRINCIPAL("sourceNNKerberosPrincipal", "Source name node kerberos principal", false), - REPLICATION_SOURCE_HIVE_METASTORE_KERBEROS_PRINCIPAL("sourceHiveMetastoreKerberosPrincipal", - "Source hive metastore kerberos principal", false), - REPLICATION_SOURCE_HIVE2_KERBEROS_PRINCIPAL("sourceHive2KerberosPrincipal", - "Source hiveserver2 kerberos principal", false), - - REPLICATION_TARGET_CLUSTER("targetCluster", "Replication target cluster name"), - REPLICATION_TARGET_METASTORE_URI("targetMetastoreUri", "Target Hive metastore uri"), - REPLICATION_TARGET_HS2_URI("targetHiveServer2Uri", "Target HS2 uri"), - REPLICATION_TARGET_STAGING_PATH("targetStagingPath", "Location of target staging path"), - 
REPLICATION_TARGET_NN("targetNN", "Target name node"), - REPLICATION_TARGET_NN_KERBEROS_PRINCIPAL("targetNNKerberosPrincipal", "Target name node kerberos principal", false), - REPLICATION_TARGET_HIVE_METASTORE_KERBEROS_PRINCIPAL("targetHiveMetastoreKerberosPrincipal", - "Target hive metastore kerberos principal", false), - REPLICATION_TARGET_HIVE2_KERBEROS_PRINCIPAL("targetHive2KerberosPrincipal", - "Target hiveserver2 kerberos principal", false), - - REPLICATION_MAX_EVENTS("maxEvents", "Maximum events to replicate"), - REPLICATION_MAX_MAPS("replicationMaxMaps", "Maximum number of maps used during replication"), - DISTCP_MAX_MAPS("distcpMaxMaps", "Maximum number of maps used during distcp"), - REPLICATION_MAP_BANDWIDTH_IN_MB("distcpMapBandwidth", "Bandwidth in MB/s used by each mapper during replication"), - CLUSTER_FOR_JOB_RUN("clusterForJobRun", "Cluster on which replication job runs", false), - CLUSTER_FOR_JOB_NN_KERBEROS_PRINCIPAL("clusterForJobNNKerberosPrincipal", - "Write EP of cluster on which replication job runs", false), - CLUSTER_FOR_JOB_RUN_WRITE_EP("clusterForJobRunWriteEP", "Write EP of cluster on which replication job runs", false), - HIVE_DR_JOB_NAME("drJobName", "Unique hive DR job name", false); - - private final String name; - private final String description; - private final boolean isRequired; - - HiveReplicationRecipeToolOptions(String name, String description) { - this(name, description, true); - } - - HiveReplicationRecipeToolOptions(String name, String description, boolean isRequired) { - this.name = name; - this.description = description; - this.isRequired = isRequired; - } - - public String getName() { - return this.name; - } - - public String getDescription() { - return description; - } - - public boolean isRequired() { - return isRequired; - } - - @Override - public String toString() { - return getName(); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/Recipe.java 
---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/Recipe.java b/client/src/main/java/org/apache/falcon/recipe/Recipe.java deleted file mode 100644 index 609131d..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/Recipe.java +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import java.util.Properties; - -/** - * Recipe interface. 
- */ -public interface Recipe { - void validate(final Properties recipeProperties) throws Exception; - Properties getAdditionalSystemProperties(final Properties recipeProperties); -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java b/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java deleted file mode 100644 index 32b0871..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import org.apache.falcon.cli.FalconCLI.RecipeOperation; - -/** - * Recipe factory. 
- */ -public final class RecipeFactory { - - private RecipeFactory() { - } - - public static Recipe getRecipeToolType(String recipeType) { - if (recipeType == null) { - return null; - } - - if (RecipeOperation.HDFS_REPLICATION.toString().equalsIgnoreCase(recipeType)) { - return new HdfsReplicationRecipeTool(); - } else if (RecipeOperation.HIVE_DISASTER_RECOVERY.toString().equalsIgnoreCase(recipeType)) { - return new HiveReplicationRecipeTool(); - } - return null; - } - -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java b/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java deleted file mode 100644 index 243ff4d..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java +++ /dev/null @@ -1,285 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.falcon.recipe; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.falcon.recipe.util.RecipeProcessBuilderUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.security.PrivilegedExceptionAction; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Base recipe tool for Falcon recipes. 
- */ -public class RecipeTool extends Configured implements Tool { - private static final String HDFS_WF_PATH = "falcon" + File.separator + "recipes" + File.separator; - private static final FsPermission FS_PERMISSION = - new FsPermission(FsAction.ALL, FsAction.READ, FsAction.NONE); - private static final String FS_DEFAULT_NAME_KEY = "fs.defaultFS"; - private static final String NN_PRINCIPAL = "dfs.namenode.kerberos.principal"; - - public static void main(String[] args) throws Exception { - ToolRunner.run(new Configuration(), new RecipeTool(), args); - } - - @Override - public int run(String[] arguments) throws Exception { - - Map<RecipeToolArgs, String> argMap = setupArgs(arguments); - if (argMap == null || argMap.isEmpty()) { - throw new Exception("Arguments passed to recipe is null"); - } - Configuration conf = getConf(); - String recipePropertiesFilePath = argMap.get(RecipeToolArgs.RECIPE_PROPERTIES_FILE_ARG); - Properties recipeProperties = loadProperties(recipePropertiesFilePath); - validateProperties(recipeProperties); - - String recipeOperation = argMap.get(RecipeToolArgs.RECIPE_OPERATION_ARG); - Recipe recipeType = RecipeFactory.getRecipeToolType(recipeOperation); - if (recipeType != null) { - recipeType.validate(recipeProperties); - Properties props = recipeType.getAdditionalSystemProperties(recipeProperties); - if (props != null && !props.isEmpty()) { - recipeProperties.putAll(props); - } - } - - String processFilename; - - FileSystem fs = getFileSystemForHdfs(recipeProperties, conf); - validateArtifacts(recipeProperties, fs); - - String recipeName = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName()); - copyFilesToHdfsIfRequired(recipeProperties, fs, recipeName); - - processFilename = RecipeProcessBuilderUtils.createProcessFromTemplate(argMap.get(RecipeToolArgs - .RECIPE_FILE_ARG), recipeProperties, argMap.get(RecipeToolArgs.RECIPE_PROCESS_XML_FILE_PATH_ARG)); - - - System.out.println("Generated process file to be scheduled: "); - 
System.out.println(FileUtils.readFileToString(new File(processFilename))); - - System.out.println("Completed recipe processing"); - return 0; - } - - private Map<RecipeToolArgs, String> setupArgs(final String[] arguments) throws ParseException { - Options options = new Options(); - Map<RecipeToolArgs, String> argMap = new HashMap<RecipeToolArgs, String>(); - - for (RecipeToolArgs arg : RecipeToolArgs.values()) { - addOption(options, arg, arg.isRequired()); - } - - CommandLine cmd = new GnuParser().parse(options, arguments); - for (RecipeToolArgs arg : RecipeToolArgs.values()) { - String optionValue = arg.getOptionValue(cmd); - if (StringUtils.isNotEmpty(optionValue)) { - argMap.put(arg, optionValue); - } - } - return argMap; - } - - private static void addOption(final Options options, final RecipeToolArgs arg, - final boolean isRequired) { - Option option = arg.getOption(); - option.setRequired(isRequired); - options.addOption(option); - } - - private static void validateProperties(final Properties recipeProperties) { - for (RecipeToolOptions option : RecipeToolOptions.values()) { - if (recipeProperties.getProperty(option.getName()) == null && option.isRequired()) { - throw new IllegalArgumentException("Missing argument: " + option.getName()); - } - } - } - - private static Properties loadProperties(final String propertiesFilePath) throws Exception { - InputStream inputStream = null; - try { - inputStream = new FileInputStream(propertiesFilePath); - Properties prop = new Properties(); - prop.load(inputStream); - return prop; - } finally { - IOUtils.closeQuietly(inputStream); - } - } - - private static void validateArtifacts(final Properties recipeProperties, final FileSystem fs) throws Exception { - // validate the WF path - String wfPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_PATH.getName()); - - // Check if file exists on HDFS - if (StringUtils.isNotEmpty(wfPath) && !fs.exists(new Path(wfPath))) { - // If the file doesn't exist locally throw 
exception - if (!doesFileExist(wfPath)) { - throw new Exception("Recipe workflow file does not exist : " + wfPath + " on local FS or HDFS"); - } - } - - // validate lib path - String libPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_LIB_PATH.getName()); - if (StringUtils.isNotEmpty(libPath) && !fs.exists(new Path(libPath))) { - if (!doesFileExist(libPath)) { - throw new Exception("Recipe lib file path does not exist : " + libPath + " on local FS or HDFS"); - } - } - } - - private static void copyFilesToHdfsIfRequired(final Properties recipeProperties, - final FileSystem fs, - final String recipeName) throws Exception { - - String hdfsPath = HDFS_WF_PATH + recipeName + File.separator; - - String recipeWfPathName = RecipeToolOptions.WORKFLOW_PATH.getName(); - String wfPath = recipeProperties.getProperty(recipeWfPathName); - String wfPathValue; - - // Copy only if files are on local FS - if (StringUtils.isNotEmpty(wfPath) && !fs.exists(new Path(wfPath))) { - createDirOnHdfs(hdfsPath, fs); - if (new File(wfPath).isDirectory()) { - wfPathValue = hdfsPath + getLastPartOfPath(wfPath); - copyFileFromLocalToHdfs(wfPath, hdfsPath, true, wfPathValue, fs); - } else { - wfPathValue = hdfsPath + new File(wfPath).getName(); - copyFileFromLocalToHdfs(wfPath, hdfsPath, false, null, fs); - } - // Update the property with the hdfs path - recipeProperties.setProperty(recipeWfPathName, - fs.getFileStatus(new Path(wfPathValue)).getPath().toString()); - System.out.println("Copied WF to: " + recipeProperties.getProperty(recipeWfPathName)); - } - - String recipeWfLibPathName = RecipeToolOptions.WORKFLOW_LIB_PATH.getName(); - String libPath = recipeProperties.getProperty(recipeWfLibPathName); - String libPathValue; - // Copy only if files are on local FS - boolean isLibPathEmpty = StringUtils.isEmpty(libPath); - if (!isLibPathEmpty && !fs.exists(new Path(libPath))) { - if (new File(libPath).isDirectory()) { - libPathValue = hdfsPath + getLastPartOfPath(libPath); - 
copyFileFromLocalToHdfs(libPath, hdfsPath, true, libPathValue, fs); - } else { - libPathValue = hdfsPath + "lib" + File.separator + new File(libPath).getName(); - copyFileFromLocalToHdfs(libPath, libPathValue, false, null, fs); - } - - // Update the property with the hdfs path - recipeProperties.setProperty(recipeWfLibPathName, - fs.getFileStatus(new Path(libPathValue)).getPath().toString()); - System.out.println("Copied WF libs to: " + recipeProperties.getProperty(recipeWfLibPathName)); - } else if (isLibPathEmpty) { - // Replace ##workflow.lib.path## with "" to ignore lib in workflow template - recipeProperties.setProperty(recipeWfLibPathName, ""); - } - } - - private static String getLastPartOfPath(final String path) { - String normalizedWfPath = FilenameUtils.normalizeNoEndSeparator(path); - return (normalizedWfPath == null) ? FilenameUtils.getName(path) - : FilenameUtils.getName(normalizedWfPath); - } - - private static void createDirOnHdfs(String path, FileSystem fs) throws IOException { - Path hdfsPath = new Path(path); - if (!fs.exists(hdfsPath)) { - FileSystem.mkdirs(fs, hdfsPath, FS_PERMISSION); - } - } - - private static boolean doesFileExist(final String filename) { - return new File(filename).exists(); - } - - private static void copyFileFromLocalToHdfs(final String localFilePath, - final String hdfsFilePath, - final boolean copyDir, - final String hdfsFileDirPath, - final FileSystem fs) throws IOException { - /* If directory already exists and has contents, copyFromLocalFile with overwrite set to yes will fail with - * "Target is a directory". Delete the directory */ - if (copyDir) { - Path hdfsPath = new Path(hdfsFileDirPath); - fs.delete(hdfsPath, true); - } - - /* For cases where validation of process entity file fails, the artifacts would have been already copied to - * HDFS. 
Set overwrite to true so that next submit recipe copies updated artifacts from local FS to HDFS */ - fs.copyFromLocalFile(false, true, new Path(localFilePath), new Path(hdfsFilePath)); - } - - private FileSystem getFileSystemForHdfs(final Properties recipeProperties, - final Configuration conf) throws Exception { - String storageEndpoint = RecipeToolOptions.CLUSTER_HDFS_WRITE_ENDPOINT.getName(); - String nameNode = recipeProperties.getProperty(storageEndpoint); - conf.set(FS_DEFAULT_NAME_KEY, nameNode); - if (UserGroupInformation.isSecurityEnabled()) { - String nameNodePrincipal = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName()); - conf.set(NN_PRINCIPAL, nameNodePrincipal); - } - return createFileSystem(UserGroupInformation.getLoginUser(), new URI(nameNode), conf); - } - - private FileSystem createFileSystem(UserGroupInformation ugi, final URI uri, - final Configuration conf) throws Exception { - try { - final String proxyUserName = ugi.getShortUserName(); - if (proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) { - return FileSystem.get(uri, conf); - } - - return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() { - public FileSystem run() throws Exception { - return FileSystem.get(uri, conf); - } - }); - } catch (InterruptedException ex) { - throw new IOException("Exception creating FileSystem:" + ex.getMessage(), ex); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java b/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java deleted file mode 100644 index 79d8f18..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor 
license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Option; - -/** - * Recipe tool args. - */ -public enum RecipeToolArgs { - RECIPE_FILE_ARG("file", "recipe template file path"), - RECIPE_PROPERTIES_FILE_ARG("propertiesFile", "recipe properties file path"), - RECIPE_PROCESS_XML_FILE_PATH_ARG( - "recipeProcessFilePath", "file path of recipe process to be submitted"), - RECIPE_OPERATION_ARG("recipeOperation", "recipe operation"); - - private final String name; - private final String description; - private final boolean isRequired; - RecipeToolArgs(String name, String description) { - this(name, description, true); - } - - RecipeToolArgs(String name, String description, boolean isRequired) { - this.name = name; - this.description = description; - this.isRequired = isRequired; - } - - public Option getOption() { - return new Option(this.name, true, this.description); - } - - public String getName() { - return this.name; - } - - public String getDescription() { - return description; - } - - public boolean isRequired() { - return isRequired; - } - - public String getOptionValue(CommandLine cmd) { - return cmd.getOptionValue(this.name); - } - - @Override - public String toString() { - return 
getName(); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java b/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java deleted file mode 100644 index 2a7a7a0..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.recipe; - -import java.util.Map; -import java.util.HashMap; - -/** - * Recipe tool options. 
- */ -public enum RecipeToolOptions { - RECIPE_NAME("falcon.recipe.name", "Recipe name", false), - CLUSTER_NAME("falcon.recipe.cluster.name", "Cluster name where replication job should run", false), - CLUSTER_HDFS_WRITE_ENDPOINT( - "falcon.recipe.cluster.hdfs.writeEndPoint", "Cluster HDFS write endpoint"), - CLUSTER_VALIDITY_START("falcon.recipe.cluster.validity.start", "Source cluster validity start", false), - CLUSTER_VALIDITY_END("falcon.recipe.cluster.validity.end", "Source cluster validity end", false), - WORKFLOW_NAME("falcon.recipe.workflow.name", "Workflow name", false), - WORKFLOW_PATH("falcon.recipe.workflow.path", "Workflow path", false), - WORKFLOW_LIB_PATH("falcon.recipe.workflow.lib.path", "WF lib path", false), - PROCESS_FREQUENCY("falcon.recipe.process.frequency", "Process frequency", false), - RETRY_POLICY("falcon.recipe.retry.policy", "Retry policy", false), - RETRY_DELAY("falcon.recipe.retry.delay", "Retry delay", false), - RETRY_ATTEMPTS("falcon.recipe.retry.attempts", "Retry attempts", false), - RETRY_ON_TIMEOUT("falcon.recipe.retry.onTimeout", "Retry onTimeout", false), - RECIPE_TAGS("falcon.recipe.tags", "Recipe tags", false), - RECIPE_ACL_OWNER("falcon.recipe.acl.owner", "Recipe acl owner", false), - RECIPE_ACL_GROUP("falcon.recipe.acl.group", "Recipe acl group", false), - RECIPE_ACL_PERMISSION("falcon.recipe.acl.permission", "Recipe acl permission", false), - RECIPE_NN_PRINCIPAL("falcon.recipe.nn.principal", "Recipe DFS NN principal", false), - RECIPE_NOTIFICATION_TYPE("falcon.recipe.notification.type", "Recipe Notification Type", false), - RECIPE_NOTIFICATION_ADDRESS("falcon.recipe.notification.receivers", "Recipe Email Notification receivers", false); - - private final String name; - private final String description; - private final boolean isRequired; - - private static Map<String, RecipeToolOptions> optionsMap = new HashMap<>(); - static { - for (RecipeToolOptions c : RecipeToolOptions.values()) { - optionsMap.put(c.getName(), c); - } - 
} - - public static Map<String, RecipeToolOptions> getOptionsMap() { - return optionsMap; - } - - RecipeToolOptions(String name, String description) { - this(name, description, true); - } - - RecipeToolOptions(String name, String description, boolean isRequired) { - this.name = name; - this.description = description; - this.isRequired = isRequired; - } - - public String getName() { - return this.name; - } - - public String getDescription() { - return description; - } - - public boolean isRequired() { - return isRequired; - } - - @Override - public String toString() { - return getName(); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java b/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java deleted file mode 100644 index 550ca1b..0000000 --- a/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java +++ /dev/null @@ -1,293 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.falcon.recipe.util; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.falcon.entity.v0.Entity; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.Frequency; -import org.apache.falcon.entity.v0.SchemaHelper; -import org.apache.falcon.entity.v0.process.ACL; -import org.apache.falcon.entity.v0.process.Cluster; -import org.apache.falcon.entity.v0.process.Notification; -import org.apache.falcon.entity.v0.process.PolicyType; -import org.apache.falcon.entity.v0.process.Property; -import org.apache.falcon.entity.v0.process.Retry; -import org.apache.falcon.entity.v0.process.Workflow; -import org.apache.falcon.recipe.RecipeToolOptions; - -import javax.xml.bind.JAXBException; -import javax.xml.bind.Unmarshaller; -import javax.xml.bind.ValidationEvent; -import javax.xml.bind.ValidationEventHandler; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.OutputStream; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Recipe builder utility. 
- */ -public final class RecipeProcessBuilderUtils { - - private static final Pattern RECIPE_VAR_PATTERN = Pattern.compile("##[A-Za-z0-9_.]*##"); - - private RecipeProcessBuilderUtils() { - } - - public static String createProcessFromTemplate(final String processTemplateFile, final Properties recipeProperties, - final String processFilename) throws Exception { - org.apache.falcon.entity.v0.process.Process process = bindAttributesInTemplate( - processTemplateFile, recipeProperties); - String recipeProcessFilename = createProcessXmlFile(processFilename, process); - - validateProcessXmlFile(recipeProcessFilename); - return recipeProcessFilename; - } - - private static org.apache.falcon.entity.v0.process.Process - bindAttributesInTemplate(final String templateFile, final Properties recipeProperties) - throws Exception { - if (templateFile == null || recipeProperties == null) { - throw new IllegalArgumentException("Invalid arguments passed"); - } - - Unmarshaller unmarshaller = EntityType.PROCESS.getUnmarshaller(); - // Validation can be skipped for unmarshalling as we want to bind tempalte with the properties. Vaildation is - // hanles as part of marshalling - unmarshaller.setSchema(null); - unmarshaller.setEventHandler(new ValidationEventHandler() { - public boolean handleEvent(ValidationEvent validationEvent) { - return true; - } - } - ); - - URL processResourceUrl = new File(templateFile).toURI().toURL(); - org.apache.falcon.entity.v0.process.Process process = - (org.apache.falcon.entity.v0.process.Process) unmarshaller.unmarshal(processResourceUrl); - - /* For optional properties user might directly set them in the process xml and might not set it in properties - file. 
Before doing the submission validation is done to confirm process xml doesn't have RECIPE_VAR_PATTERN - */ - - String processName = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName()); - if (StringUtils.isNotEmpty(processName)) { - process.setName(processName); - } - - // DR process template has only one cluster - bindClusterProperties(process.getClusters().getClusters().get(0), recipeProperties); - - // bind scheduling properties - String processFrequency = recipeProperties.getProperty(RecipeToolOptions.PROCESS_FREQUENCY.getName()); - if (StringUtils.isNotEmpty(processFrequency)) { - process.setFrequency(Frequency.fromString(processFrequency)); - } - - bindWorkflowProperties(process.getWorkflow(), recipeProperties); - bindRetryProperties(process.getRetry(), recipeProperties); - bindNotificationProperties(process.getNotification(), recipeProperties); - bindACLProperties(process.getACL(), recipeProperties); - bindTagsProperties(process, recipeProperties); - bindCustomProperties(process.getProperties(), recipeProperties); - - return process; - } - - private static void bindClusterProperties(final Cluster cluster, - final Properties recipeProperties) { - // DR process template has only one cluster - String clusterName = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_NAME.getName()); - if (StringUtils.isNotEmpty(clusterName)) { - cluster.setName(clusterName); - } - - String clusterStartValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_START.getName()); - if (StringUtils.isNotEmpty(clusterStartValidity)) { - cluster.getValidity().setStart(SchemaHelper.parseDateUTC(clusterStartValidity)); - } - - String clusterEndValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_END.getName()); - if (StringUtils.isNotEmpty(clusterEndValidity)) { - cluster.getValidity().setEnd(SchemaHelper.parseDateUTC(clusterEndValidity)); - } - } - - private static void bindWorkflowProperties(final Workflow wf, - final 
Properties recipeProperties) { - String wfName = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_NAME.getName()); - if (StringUtils.isNotEmpty(wfName)) { - wf.setName(wfName); - } - - String wfLibPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_LIB_PATH.getName()); - if (StringUtils.isNotEmpty(wfLibPath)) { - wf.setLib(wfLibPath); - } else if (wf.getLib().startsWith("##")) { - wf.setLib(""); - } - - String wfPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_PATH.getName()); - if (StringUtils.isNotEmpty(wfPath)) { - wf.setPath(wfPath); - } - } - - private static void bindRetryProperties(final Retry processRetry, - final Properties recipeProperties) { - String retryPolicy = recipeProperties.getProperty(RecipeToolOptions.RETRY_POLICY.getName()); - if (StringUtils.isNotEmpty(retryPolicy)) { - processRetry.setPolicy(PolicyType.fromValue(retryPolicy)); - } - - String retryAttempts = recipeProperties.getProperty(RecipeToolOptions.RETRY_ATTEMPTS.getName()); - if (StringUtils.isNotEmpty(retryAttempts)) { - processRetry.setAttempts(Integer.parseInt(retryAttempts)); - } - - String retryDelay = recipeProperties.getProperty(RecipeToolOptions.RETRY_DELAY.getName()); - if (StringUtils.isNotEmpty(retryDelay)) { - processRetry.setDelay(Frequency.fromString(retryDelay)); - } - - String retryOnTimeout = recipeProperties.getProperty(RecipeToolOptions.RETRY_ON_TIMEOUT.getName()); - if (StringUtils.isNotEmpty(retryOnTimeout)) { - processRetry.setOnTimeout(Boolean.valueOf(retryOnTimeout)); - } - } - - private static void bindNotificationProperties(final Notification processNotification, - final Properties recipeProperties) { - processNotification.setType(recipeProperties.getProperty( - RecipeToolOptions.RECIPE_NOTIFICATION_TYPE.getName())); - - String notificationAddress = recipeProperties.getProperty( - RecipeToolOptions.RECIPE_NOTIFICATION_ADDRESS.getName()); - if (StringUtils.isNotBlank(notificationAddress)) { - 
processNotification.setTo(notificationAddress); - } else { - processNotification.setTo("NA"); - } - } - - private static void bindACLProperties(final ACL acl, - final Properties recipeProperties) { - String aclowner = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_OWNER.getName()); - if (StringUtils.isNotEmpty(aclowner)) { - acl.setOwner(aclowner); - } - - String aclGroup = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_GROUP.getName()); - if (StringUtils.isNotEmpty(aclGroup)) { - acl.setGroup(aclGroup); - } - - String aclPermission = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_PERMISSION.getName()); - if (StringUtils.isNotEmpty(aclPermission)) { - acl.setPermission(aclPermission); - } - } - - private static void bindTagsProperties(final org.apache.falcon.entity.v0.process.Process process, - final Properties recipeProperties) { - String falconSystemTags = process.getTags(); - String tags = recipeProperties.getProperty(RecipeToolOptions.RECIPE_TAGS.getName()); - if (StringUtils.isNotEmpty(tags)) { - if (StringUtils.isNotEmpty(falconSystemTags)) { - tags += ", " + falconSystemTags; - } - process.setTags(tags); - } - } - - - private static void bindCustomProperties(final org.apache.falcon.entity.v0.process.Properties customProperties, - final Properties recipeProperties) { - List<Property> propertyList = new ArrayList<>(); - - for (Map.Entry<Object, Object> recipeProperty : recipeProperties.entrySet()) { - if (RecipeToolOptions.getOptionsMap().get(recipeProperty.getKey().toString()) == null) { - addProperty(propertyList, (String) recipeProperty.getKey(), (String) recipeProperty.getValue()); - } - } - - customProperties.getProperties().addAll(propertyList); - } - - private static void addProperty(List<Property> propertyList, String name, String value) { - Property prop = new Property(); - prop.setName(name); - prop.setValue(value); - propertyList.add(prop); - } - - private static String createProcessXmlFile(final String outFilename, 
- final Entity entity) throws Exception { - if (outFilename == null || entity == null) { - throw new IllegalArgumentException("Invalid arguments passed"); - } - - EntityType type = EntityType.PROCESS; - OutputStream out = null; - try { - out = new FileOutputStream(outFilename); - type.getMarshaller().marshal(entity, out); - } catch (JAXBException e) { - throw new Exception("Unable to serialize the entity object " + type + "/" + entity.getName(), e); - } finally { - IOUtils.closeQuietly(out); - } - return outFilename; - } - - private static void validateProcessXmlFile(final String processFileName) throws Exception { - if (processFileName == null) { - throw new IllegalArgumentException("Invalid arguments passed"); - } - - String line; - BufferedReader reader = null; - - try { - reader = new BufferedReader(new FileReader(processFileName)); - while ((line = reader.readLine()) != null) { - Matcher matcher = RECIPE_VAR_PATTERN.matcher(line); - if (matcher.find()) { - String variable = line.substring(matcher.start(), matcher.end()); - throw new Exception("Match not found for the template: " + variable - + " in recipe template file. Please add it in recipe properties file"); - } - } - } finally { - IOUtils.closeQuietly(reader); - } - - } - -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/client/src/main/java/org/apache/falcon/resource/APIResult.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/falcon/resource/APIResult.java b/client/src/main/java/org/apache/falcon/resource/APIResult.java deleted file mode 100644 index e67eb3a..0000000 --- a/client/src/main/java/org/apache/falcon/resource/APIResult.java +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.resource; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Marshaller; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlRootElement; -import java.io.StringWriter; - -/** - * APIResult is the output returned by all the APIs; status-SUCCEEDED or FAILED - * message- detailed message. - */ -@XmlRootElement(name = "result") -@XmlAccessorType(XmlAccessType.FIELD) [email protected]({"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) -public class APIResult { - - private Status status; - - private String message; - - private String requestId; - - private static final JAXBContext JAXB_CONTEXT; - - static { - try { - JAXB_CONTEXT = JAXBContext.newInstance(APIResult.class); - } catch (JAXBException e) { - throw new RuntimeException(e); - } - } - - /** - * API Result status. 
- */ - public static enum Status { - SUCCEEDED, PARTIAL, FAILED - } - - public APIResult(Status status, String message) { - super(); - this.status = status; - this.message = message; - requestId = Thread.currentThread().getName(); - } - - protected APIResult() { - // private default constructor for JAXB - } - - public Status getStatus() { - return status; - } - - public String getMessage() { - return message; - } - - public String getRequestId() { - return requestId; - } - - public void setRequestId(String reqId) { - this.requestId = reqId; - } - - @Override - public String toString() { - try { - StringWriter stringWriter = new StringWriter(); - Marshaller marshaller = JAXB_CONTEXT.createMarshaller(); - marshaller.marshal(this, stringWriter); - return stringWriter.toString(); - } catch (JAXBException e) { - return e.getMessage(); - } - } - - public Object[] getCollection() { - return null; - } - - public void setCollection(Object[] items) { - } -}
