Repository: asterixdb
Updated Branches:
  refs/heads/master 7722e5d4d -> d25513ccb
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 53c3dc0..2c457a9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -37,7 +37,6 @@ import org.apache.asterix.common.context.TransactionSubsystemProvider;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.transactions.IRecoveryManager;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.external.indexing.IndexingConstants;
@@ -293,7 +292,8 @@ public class DatasetUtil {
         LOGGER.info("CREATING File Splits: " + sb.toString());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
-        //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
+        // prepare a LocalResourceMetadata which will be stored in NC's local resource
+        // repository
         IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, index, itemType,
                 metaItemType, compactionInfo.first, compactionInfo.second);
         IndexBuilderFactory indexBuilderFactory =
@@ -393,74 +393,69 @@ public class DatasetUtil {
         int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
         ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
         ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
-        try {
-            Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
-                    dataset.getDatasetName());
-            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
-                    metadataProvider.getSplitProviderAndConstraints(dataset);
-
-            // prepare callback
-            JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
-            int[] primaryKeyFields = new int[numKeys];
-            for (int i = 0; i < numKeys; i++) {
-                primaryKeyFields[i] = i;
-            }
-            boolean hasSecondaries =
-                    metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
-            IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
-            IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
-                    storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
-            ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(
-                    storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
-            IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(
-                    storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
-            LSMPrimaryUpsertOperatorDescriptor op;
-            ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount()
-                    + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
-            ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount()
-                    + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
-            IDataFormat dataFormat = metadataProvider.getDataFormat();
-
-            // add the previous record first
-            int f = 0;
-            outputSerDes[f] = dataFormat.getSerdeProvider().getSerializerDeserializer(itemType);
-            f++;
-            // add the previous meta second
-            if (dataset.hasMetaPart()) {
-                outputSerDes[f] = dataFormat.getSerdeProvider().getSerializerDeserializer(metaItemType);
-                outputTypeTraits[f] = dataFormat.getTypeTraitProvider().getTypeTrait(metaItemType);
-                f++;
-            }
-            // add the previous filter third
-            int fieldIdx = -1;
-            if (numFilterFields > 0) {
-                String filterField = DatasetUtil.getFilterField(dataset).get(0);
-                String[] fieldNames = itemType.getFieldNames();
-                int i = 0;
-                for (; i < fieldNames.length; i++) {
-                    if (fieldNames[i].equals(filterField)) {
-                        break;
-                    }
-                }
-                fieldIdx = i;
-                outputTypeTraits[f] =
-                        dataFormat.getTypeTraitProvider().getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
-                outputSerDes[f] =
-                        dataFormat.getSerdeProvider().getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
-                f++;
-            }
-            for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) {
-                outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j];
-                outputSerDes[j + f] = inputRecordDesc.getFields()[j];
-            }
-            RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
-            op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc, fieldPermutation, idfh,
-                    missingWriterFactory, modificationCallbackFactory, searchCallbackFactory,
-                    dataset.getFrameOpCallbackFactory(), numKeys, itemType, fieldIdx, hasSecondaries);
-            return new Pair<>(op, splitsAndConstraint.second);
-        } catch (MetadataException me) {
-            throw new AlgebricksException(me);
-        }
+        Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
+                dataset.getDatasetName());
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+                metadataProvider.getSplitProviderAndConstraints(dataset);
+
+        // prepare callback
+        JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
+        int[] primaryKeyFields = new int[numKeys];
+        for (int i = 0; i < numKeys; i++) {
+            primaryKeyFields[i] = i;
+        }
+        boolean hasSecondaries =
+                metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
+        IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
+        IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
+                storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
+        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(
+                storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
+        IIndexDataflowHelperFactory idfh =
+                new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
+        LSMPrimaryUpsertOperatorDescriptor op;
+        ITypeTraits[] outputTypeTraits =
+                new ITypeTraits[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
+        ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount()
+                + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
+        IDataFormat dataFormat = metadataProvider.getDataFormat();
+
+        // add the previous record first
+        int f = 0;
+        outputSerDes[f] = dataFormat.getSerdeProvider().getSerializerDeserializer(itemType);
+        f++;
+        // add the previous meta second
+        if (dataset.hasMetaPart()) {
+            outputSerDes[f] = dataFormat.getSerdeProvider().getSerializerDeserializer(metaItemType);
+            outputTypeTraits[f] = dataFormat.getTypeTraitProvider().getTypeTrait(metaItemType);
+            f++;
+        }
+        // add the previous filter third
+        int fieldIdx = -1;
+        if (numFilterFields > 0) {
+            String filterField = DatasetUtil.getFilterField(dataset).get(0);
+            String[] fieldNames = itemType.getFieldNames();
+            int i = 0;
+            for (; i < fieldNames.length; i++) {
+                if (fieldNames[i].equals(filterField)) {
+                    break;
+                }
+            }
+            fieldIdx = i;
+            outputTypeTraits[f] = dataFormat.getTypeTraitProvider().getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
+            outputSerDes[f] =
+                    dataFormat.getSerdeProvider().getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
+            f++;
+        }
+        for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) {
+            outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j];
+            outputSerDes[j + f] = inputRecordDesc.getFields()[j];
+        }
+        RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
+        op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc, fieldPermutation, idfh,
+                missingWriterFactory, modificationCallbackFactory, searchCallbackFactory,
+                dataset.getFrameOpCallbackFactory(), numKeys, itemType, fieldIdx, hasSecondaries);
+        return new Pair<>(op, splitsAndConstraint.second);
     }
 
     /**
@@ -503,7 +498,7 @@ public class DatasetUtil {
     }
 
     public static boolean isFullyQualifiedName(String datasetName) {
-        return datasetName.indexOf('.') > 0; //NOSONAR a fully qualified name can't start with a .
+        return datasetName.indexOf('.') > 0; // NOSONAR a fully qualified name can't start with a .
     }
 
     public static String getFullyQualifiedName(Dataset dataset) {
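[Editor's note] The hunk above defines the record layout that createPrimaryUpsertOp hands to LSMPrimaryUpsertOperatorDescriptor: an array of size inputFieldCount + (hasMetaPart ? 2 : 1) + numFilterFields, filled in a fixed order. As a reading aid, here is a minimal standalone Java sketch of that layout logic; it is not AsterixDB code, and the field names are invented for illustration:

    import java.util.ArrayList;
    import java.util.List;

    public class UpsertOutputLayout {
        // Order mirrors the comments in the hunk: previous record first,
        // previous meta second, previous filter third, then all fields of the
        // input record, shifted by the prefix length f.
        static List<String> layout(List<String> inputFields, boolean hasMeta, boolean hasFilter) {
            List<String> out = new ArrayList<>();
            out.add("prevRecord");
            if (hasMeta) {
                out.add("prevMeta");
            }
            if (hasFilter) {
                out.add("prevFilter");
            }
            out.addAll(inputFields);
            return out;
        }

        public static void main(String[] args) {
            // total size = 2 (inputs) + 2 (prev record + prev meta) + 0 filters
            System.out.println(layout(List.of("pk", "rec"), true, false));
            // -> [prevRecord, prevMeta, pk, rec]
        }
    }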
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
index 5b7ea59..98cfc57 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
@@ -25,7 +25,6 @@ import java.util.List;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.cluster.IClusterStateManager;
 import org.apache.asterix.common.config.ClusterProperties;
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -59,16 +58,12 @@ public class SplitsAndConstraintsUtil {
 
     public static FileSplit[] getIndexSplits(IClusterStateManager clusterStateManager, Dataset dataset,
             String indexName, MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
-        try {
-            NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName());
-            if (nodeGroup == null) {
-                throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
-            }
-            List<String> nodeList = nodeGroup.getNodeNames();
-            return getIndexSplits(clusterStateManager, dataset, indexName, nodeList);
-        } catch (MetadataException me) {
-            throw new AlgebricksException(me);
+        NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName());
+        if (nodeGroup == null) {
+            throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
         }
+        List<String> nodeList = nodeGroup.getNodeNames();
+        return getIndexSplits(clusterStateManager, dataset, indexName, nodeList);
     }
 
     public static FileSplit[] getIndexSplits(IClusterStateManager clusterStateManager, Dataset dataset,
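[Editor's note] Most hunks in this commit delete the same wrap-and-rethrow boilerplate seen above. Removing the catch block only compiles if MetadataException is assignable to AlgebricksException, which suggests (the diff itself does not show the exception classes) that MetadataException was moved under AlgebricksException in the hierarchy. A self-contained sketch of the before/after shape, using stand-in classes that merely mirror the real names:

    class AlgebricksException extends Exception {
        AlgebricksException(String msg) { super(msg); }
        AlgebricksException(Throwable cause) { super(cause); }
    }

    class MetadataException extends AlgebricksException { // assumed relationship
        MetadataException(String msg) { super(msg); }
    }

    public class UnwrapDemo {
        // Before: every caller wrapped and rethrew, nesting the real cause.
        static void before() throws AlgebricksException {
            try {
                lookup();
            } catch (MetadataException me) {
                throw new AlgebricksException(me);
            }
        }

        // After: the subclass already satisfies "throws AlgebricksException",
        // so the wrapper disappears and the original exception propagates.
        static void after() throws AlgebricksException {
            lookup();
        }

        static void lookup() throws MetadataException {
            throw new MetadataException("node group not found");
        }

        public static void main(String[] args) {
            try {
                after();
            } catch (AlgebricksException e) {
                System.out.println(e.getMessage()); // node group not found
            }
        }
    }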
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index ff65994..40623bd 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -25,6 +25,7 @@ import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
 import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
@@ -40,7 +41,7 @@ public class MetadataEntityValueExtractor<T> implements IValueExtractor<T> {
 
     @Override
     public T getValue(JobId jobId, ITupleReference tuple)
-            throws MetadataException, HyracksDataException, RemoteException {
+            throws AlgebricksException, HyracksDataException, RemoteException {
         return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
     }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
index 5f0525b..22aea26 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
@@ -24,9 +24,9 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.util.string.UTF8StringReader;
@@ -49,7 +49,7 @@ public class NestedDatatypeNameValueExtractor implements IValueExtractor<String>
     private final UTF8StringReader reader = new UTF8StringReader();
 
     @Override
-    public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
+    public String getValue(JobId jobId, ITupleReference tuple) throws AlgebricksException, HyracksDataException {
         byte[] serRecord = tuple.getFieldData(2);
         int recordStartOffset = tuple.getFieldStart(2);
         int recordLength = tuple.getFieldLength(2);
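[Editor's note] The extractor diffs above and below are the interface-level half of the same change: every IValueExtractor.getValue implementation now declares the broader AlgebricksException instead of MetadataException. A compact sketch of why that widening is safe, with simplified stand-in types in place of JobId and ITupleReference:

    class AlgebricksException extends Exception {
        AlgebricksException(String msg) { super(msg); }
    }

    class MetadataException extends AlgebricksException { // assumed, as above
        MetadataException(String msg) { super(msg); }
    }

    interface IValueExtractorSketch<T> {
        // the widened clause: AlgebricksException instead of MetadataException
        T getValue(long jobId, byte[] tuple) throws AlgebricksException;
    }

    public class ExtractorDemo implements IValueExtractorSketch<Integer> {
        @Override
        public Integer getValue(long jobId, byte[] tuple) throws AlgebricksException {
            if (tuple == null) {
                // implementations may still throw the narrower subtype
                throw new MetadataException("tuple is missing");
            }
            return tuple.length;
        }

        public static void main(String[] args) throws AlgebricksException {
            // callers that already handle AlgebricksException need no change
            System.out.println(new ExtractorDemo().getValue(1L, new byte[] { 1, 2, 3 })); // 3
        }
    }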
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
index 1928d7e..5f16543 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
@@ -21,9 +21,9 @@ package org.apache.asterix.metadata.valueextractors;
 
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -48,7 +48,8 @@ public class TupleCopyValueExtractor implements IValueExtractor<ITupleReference>
     }
 
     @Override
-    public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
+    public ITupleReference getValue(JobId jobId, ITupleReference tuple)
+            throws AlgebricksException, HyracksDataException {
         int numBytes = tupleWriter.bytesRequired(tuple);
         tupleBytes = new byte[numBytes];
         tupleWriter.writeTuple(tuple, tupleBytes, 0);
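[Editor's note] For context on the method whose signature just changed: TupleCopyValueExtractor.getValue materializes the tuple into a fresh byte[] (bytesRequired, then writeTuple), presumably because the frame buffer behind an ITupleReference can be reused once the cursor advances. A standalone ByteBuffer-based sketch of that copy discipline, with invented data:

    import java.nio.ByteBuffer;

    public class TupleCopyDemo {
        // Stand-in for the bytesRequired/writeTuple pair: copy the tuple's
        // bytes out of the shared frame into an owned array.
        static byte[] copyTuple(ByteBuffer frame, int start, int length) {
            byte[] copy = new byte[length];
            frame.position(start);
            frame.get(copy, 0, length);
            return copy; // safe to hold after the cursor moves on
        }

        public static void main(String[] args) {
            ByteBuffer frame = ByteBuffer.wrap("k1|v1|k2|v2".getBytes());
            byte[] first = copyTuple(frame, 0, 5);
            // overwriting the frame no longer affects the copy
            frame.position(0);
            frame.put("XXXXX".getBytes());
            System.out.println(new String(first)); // k1|v1
        }
    }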
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
index e5d0d7d..0198d74 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
@@ -30,6 +30,7 @@ import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
 import org.apache.asterix.om.types.BuiltinType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.junit.Assert;
 import org.junit.Test;
@@ -37,7 +38,7 @@ import org.junit.Test;
 public class DatasetTupleTranslatorTest {
 
     @Test
-    public void test() throws MetadataException, IOException {
+    public void test() throws AlgebricksException, IOException {
         Integer[] indicators = { 0, 1, null };
         for (Integer indicator : indicators) {
             Map<String, String> compactionPolicyProperties = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
index cdbaad3..32f65e1 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
@@ -41,6 +41,7 @@ import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningS
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.junit.Assert;
 import org.junit.Test;
@@ -48,7 +49,7 @@ import org.junit.Test;
 public class IndexTupleTranslatorTest {
 
     @Test
-    public void test() throws MetadataException, IOException {
+    public void test() throws AlgebricksException, IOException {
         Integer[] indicators = { 0, 1, null };
         for (Integer indicator : indicators) {
             Map<String, String> compactionPolicyProperties = new HashMap<>();
http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/AdmDataGen.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/AdmDataGen.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/AdmDataGen.java
index 70bf864..818444e 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/AdmDataGen.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/AdmDataGen.java
@@ -46,8 +46,6 @@ import org.apache.asterix.common.annotations.RecordDataGenAnnotation;
 import org.apache.asterix.common.annotations.TypeDataGen;
 import org.apache.asterix.common.annotations.UndeclaredFieldsDataGen;
 import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.lang.aql.parser.AQLParserFactory;
 import org.apache.asterix.lang.aql.parser.ParseException;
@@ -727,8 +725,8 @@
                                     "list-val-file annotation cannot be used for field of type " + ti.getTypeTag());
                         }
                         AbstractCollectionType act = (AbstractCollectionType) ti;
-                        declaredFieldsGenerators[i] = new ListFromArrayGenerator(act, a, lvf.getMin(),
-                                lvf.getMax());
+                        declaredFieldsGenerators[i] =
+                                new ListFromArrayGenerator(act, a, lvf.getMin(), lvf.getMax());
                         break;
                     }
                     case VALFILESAMEINDEX: {
@@ -774,9 +772,9 @@
                         }
                         switch (fi.getValueType()) {
                             case INT: {
-                                declaredFieldsGenerators[i] = new IntIntervalGenerator(
-                                        Integer.parseInt(fi.getMin()), Integer.parseInt(fi.getMax()), prefix,
-                                        suffix);
+                                declaredFieldsGenerators[i] =
+                                        new IntIntervalGenerator(Integer.parseInt(fi.getMin()),
+                                                Integer.parseInt(fi.getMax()), prefix, suffix);
                                 break;
                             }
                             case LONG: {
@@ -785,9 +783,9 @@
                                 break;
                             }
                             case DOUBLE: {
-                                declaredFieldsGenerators[i] = new DoubleIntervalGenerator(
-                                        Double.parseDouble(fi.getMin()), Double.parseDouble(fi.getMax()), prefix,
-                                        suffix);
+                                declaredFieldsGenerators[i] =
+                                        new DoubleIntervalGenerator(Double.parseDouble(fi.getMin()),
+                                                Double.parseDouble(fi.getMax()), prefix, suffix);
                                 break;
                             }
                             default: {
@@ -813,14 +811,14 @@
                     }
                     case DATEBETWEENYEARS: {
                         DateBetweenYearsDataGen dby = (DateBetweenYearsDataGen) rfdg;
-                        declaredFieldsGenerators[i] = new DateBetweenYearsGenerator(dby.getMinYear(),
-                                dby.getMaxYear());
+                        declaredFieldsGenerators[i] =
+                                new DateBetweenYearsGenerator(dby.getMinYear(), dby.getMaxYear());
                         break;
                     }
                     case DATETIMEBETWEENYEARS: {
                         DatetimeBetweenYearsDataGen dtby = (DatetimeBetweenYearsDataGen) rfdg;
-                        declaredFieldsGenerators[i] = new DatetimeBetweenYearsGenerator(dtby.getMinYear(),
-                                dtby.getMaxYear());
+                        declaredFieldsGenerators[i] =
+                                new DatetimeBetweenYearsGenerator(dtby.getMinYear(), dtby.getMaxYear());
                         break;
                     }
                     case DATETIMEADDRANDHOURS: {
@@ -842,21 +840,21 @@
                             throw new Exception("Couldn't find field " + dtarh.getAddToField() + " before field "
                                     + recType.getFieldNames()[i]);
                         }
-                        declaredFieldsGenerators[i] = new DatetimeAddRandHoursGenerator(dtarh.getMinHour(),
-                                dtarh.getMaxHour(), adtg);
+                        declaredFieldsGenerators[i] =
+                                new DatetimeAddRandHoursGenerator(dtarh.getMinHour(), dtarh.getMaxHour(), adtg);
                         break;
                     }
                     case AUTO: {
                         AutoDataGen auto = (AutoDataGen) rfdg;
                         switch (ti.getTypeTag()) {
                             case INTEGER: {
-                                declaredFieldsGenerators[i] = new IntAutoGenerator(
-                                        Integer.parseInt(auto.getInitValueStr()));
+                                declaredFieldsGenerators[i] =
+                                        new IntAutoGenerator(Integer.parseInt(auto.getInitValueStr()));
                                 break;
                             }
                             case BIGINT: {
-                                declaredFieldsGenerators[i] = new LongAutoGenerator(
-                                        Long.parseLong(auto.getInitValueStr()));
+                                declaredFieldsGenerators[i] =
+                                        new LongAutoGenerator(Long.parseLong(auto.getInitValueStr()));
                                 break;
                             }
                             default: {
@@ -881,9 +879,9 @@
                 if (!recType.isOpen()) {
                     throw new Exception("Cannot generate undeclared fields for closed type " + recType);
                 }
-                undeclaredFieldsGenerator = new GenFieldsIntGenerator(declaredFieldsGenerators.length,
-                        ufdg.getMinUndeclaredFields(), ufdg.getMaxUndeclaredFields(),
-                        ufdg.getUndeclaredFieldsPrefix());
+                undeclaredFieldsGenerator =
+                        new GenFieldsIntGenerator(declaredFieldsGenerators.length, ufdg.getMinUndeclaredFields(),
+                                ufdg.getMaxUndeclaredFields(), ufdg.getUndeclaredFieldsPrefix());
             }
         }
         if (undeclaredFieldsGenerator != null) {
@@ -937,8 +935,7 @@
         this.outputDir = outputDir;
     }
 
-    public void init() throws IOException, ParseException, ACIDException,
-            AlgebricksException {
+    public void init() throws IOException, ParseException, ACIDException, AlgebricksException {
         FileReader aql = new FileReader(schemaFile);
         IParser parser = parserFactory.createParser(aql);
         List<Statement> statements = parser.parse();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/d25513cc/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
index 9d60995..c6095bd 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
@@ -23,8 +23,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.asterix.common.annotations.TypeDataGen;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.DataverseDecl;
 import org.apache.asterix.lang.common.statement.TypeDecl;
@@ -49,8 +47,8 @@ public class ADGenDmlTranslator extends AbstractLangTranslator {
 
     public void translate() throws AlgebricksException {
         String defaultDataverse = getDefaultDataverse();
-        types = new HashMap<TypeSignature, IAType>();
-        typeDataGenMap = new HashMap<TypeSignature, TypeDataGen>();
+        types = new HashMap<>();
+        typeDataGenMap = new HashMap<>();
 
         for (Statement stmt : aqlStatements) {
             if (stmt.getKind() == Statement.Kind.TYPE_DECL) {
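[Editor's note] The AdmDataGen hunks above are formatting-only rewraps of a large annotation-to-generator dispatch: each data-gen annotation kind (interval, auto-increment, date ranges, and so on) maps to a field-value generator. A condensed standalone sketch of that dispatch shape, with abbreviated names and invented behavior for illustration:

    import java.util.Random;
    import java.util.function.Supplier;

    public class GeneratorDispatch {
        enum Kind { INT_INTERVAL, DOUBLE_INTERVAL, AUTO_INT }

        // Mirrors the switch in AdmDataGen: pick a generator per annotation kind.
        static Supplier<Object> forAnnotation(Kind kind, long min, long max) {
            Random rnd = new Random(42);
            switch (kind) {
                case INT_INTERVAL:
                    return () -> min + rnd.nextInt((int) (max - min + 1));
                case DOUBLE_INTERVAL:
                    return () -> min + rnd.nextDouble() * (max - min);
                case AUTO_INT: {
                    long[] next = { min }; // auto-increment starting value
                    return () -> next[0]++;
                }
                default:
                    throw new IllegalArgumentException("unhandled kind: " + kind);
            }
        }

        public static void main(String[] args) {
            Supplier<Object> g = forAnnotation(Kind.AUTO_INT, 100, 0);
            System.out.println(g.get() + ", " + g.get()); // 100, 101
        }
    }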
