abdullah alamoudi has uploaded a new change for review.

  https://asterix-gerrit.ics.uci.edu/455

Change subject: Fixed bug ASTERIXDB-1126. It turned out that the fix for 
ASTERIXDB-1115 introduced this bug: it tries to get the data type for the 
primary key from the record type definition, but in the case of external data, 
the type definition doesn't contain information about primary keys.
......................................................................

Fixed bug ASTERIXDB-1126
It turned out that the fix for ASTERIXDB-1115 introduced this bug. The reason 
is that it tries to get the data type for the primary key from the record type 
definition. In the case of external data, the type definition doesn't contain 
information about primary keys.

Change-Id: I71d924d7e2b7a7e6c752bc97679e612946afc17c
---
M asterix-app/pom.xml
M asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
M asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
M asterix-common/src/test/java/org/apache/asterix/test/aql/TestsUtils.java
M asterix-installer/pom.xml
M 
asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixClusterLifeCycleIT.java
M 
asterix-installer/src/test/java/org/apache/asterix/installer/transaction/DmlRecoveryIT.java
M 
asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
A 
asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.1.script.aql
A 
asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
A 
asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.3.script.aql
A 
asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.4.query.aql
A 
asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.5.script.aql
A 
asterix-installer/src/test/resources/transactionts/results/query_after_restart/external_index/external_index.1.adm
A 
asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/create_and_start.sh
A 
asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_delete.sh
A 
asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_start.sh
M asterix-installer/src/test/resources/transactionts/testsuite.xml
M 
asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
19 files changed, 332 insertions(+), 184 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb 
refs/changes/55/455/1

diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 95d9e29..8107e2a 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -97,6 +97,23 @@
                     </execution>
                 </executions>
             </plugin>
+            <plugin>
+                               <groupId>org.apache.maven.plugins</groupId>
+                               <artifactId>maven-jar-plugin</artifactId>
+                               <version>2.4</version>
+                               <configuration>
+                                       <includes>
+                                               <include>**/*.class</include>
+                                       </includes>
+                               </configuration>
+                               <executions>
+                                       <execution>
+                                               <goals>
+                                                       <goal>test-jar</goal>
+                                               </goals>
+                                       </execution>
+                               </executions>
+                       </plugin>
         </plugins>
     </build>
 
diff --git 
a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java 
b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
index 6d2de65..273b01e 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -36,7 +36,6 @@
  *
  * @author ramangrover29
  */
-@SuppressWarnings("deprecation")
 public class HDFSCluster {
 
     private static final String PATH_TO_HADOOP_CONF = 
"src/test/resources/hadoop/conf";
@@ -76,13 +75,29 @@
         build.startupOption(StartupOption.REGULAR);
         dfsCluster = build.build();
         dfs = FileSystem.get(conf);
-        loadData();
+        loadData("");
+    }
+    
+    public void setup(String basePath) throws Exception {
+        conf.addResource(new Path(basePath+PATH_TO_HADOOP_CONF + 
"/core-site.xml"));
+        conf.addResource(new Path(basePath+PATH_TO_HADOOP_CONF + 
"/mapred-site.xml"));
+        conf.addResource(new Path(basePath+PATH_TO_HADOOP_CONF + 
"/hdfs-site.xml"));
+        cleanupLocal();
+        //this constructor is deprecated in hadoop 2x 
+        //dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, 
true, true, StartupOption.REGULAR, null);
+        MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
+        build.nameNodePort(nameNodePort);
+        build.numDataNodes(numDataNodes);
+        build.startupOption(StartupOption.REGULAR);
+        dfsCluster = build.build();
+        dfs = FileSystem.get(conf);
+        loadData(basePath);
     }
 
-    private void loadData() throws IOException {
+    private void loadData(String hdfsPath) throws IOException {
         Path destDir = new Path(HDFS_PATH);
         dfs.mkdirs(destDir);
-        File srcDir = new File(DATA_PATH);
+        File srcDir = new File(hdfsPath+DATA_PATH);
         File[] listOfFiles = srcDir.listFiles();
         for (File srcFile : listOfFiles) {
             Path path = new Path(srcFile.getAbsolutePath());
@@ -108,7 +123,6 @@
         HDFSCluster cluster = new HDFSCluster();
         cluster.setup();
         JobConf conf = configureJobConf();
-        FileSystem fs = FileSystem.get(conf);
         InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
         for (InputSplit split : inputSplits) {
             System.out.println("split :" + split);
diff --git 
a/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java 
b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
index 370b8a1..2588c77 100644
--- 
a/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
+++ 
b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
@@ -31,7 +31,6 @@
 import org.junit.runners.Parameterized.Parameters;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
-
 import org.apache.asterix.test.aql.TestsUtils;
 import org.apache.asterix.test.runtime.RepeatRule.Repeat;
 import org.apache.asterix.testframework.context.TestCaseContext;
diff --git 
a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestsUtils.java 
b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestsUtils.java
index 8ff524e..09de4ba 100644
--- a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestsUtils.java
+++ b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestsUtils.java
@@ -73,8 +73,8 @@
     private static void runScriptAndCompareWithResult(File scriptFile, 
PrintWriter print, File expectedFile,
             File actualFile) throws Exception {
         System.err.println("Expected results file: " + 
expectedFile.toString());
-        BufferedReader readerExpected = new BufferedReader(new 
InputStreamReader(new FileInputStream(expectedFile),
-                "UTF-8"));
+        BufferedReader readerExpected = new BufferedReader(
+                new InputStreamReader(new FileInputStream(expectedFile), 
"UTF-8"));
         BufferedReader readerActual = new BufferedReader(
                 new InputStreamReader(new FileInputStream(actualFile), 
"UTF-8"));
         String lineExpected, lineActual;
@@ -87,8 +87,8 @@
                     if (lineExpected.isEmpty()) {
                         continue;
                     }
-                    throw new Exception("Result for " + scriptFile + " changed 
at line " + num + ":\n< " + lineExpected
-                            + "\n> ");
+                    throw new Exception(
+                            "Result for " + scriptFile + " changed at line " + 
num + ":\n< " + lineExpected + "\n> ");
                 }
 
                 if (!equalStrings(lineExpected.split("Time")[0], 
lineActual.split("Time")[0])) {
@@ -220,15 +220,14 @@
         final String url = "http://localhost:19002/query";;
 
         HttpMethodBase method = null;
-        if(str.length() + url.length() < MAX_URL_LENGTH ){
+        if (str.length() + url.length() < MAX_URL_LENGTH) {
             //Use GET for small-ish queries
             method = new GetMethod(url);
             method.setQueryString(new NameValuePair[] { new 
NameValuePair("query", str) });
-        }
-        else{
+        } else {
             //Use POST for bigger ones to avoid 413 FULL_HEAD
             method = new PostMethod(url);
-            ((PostMethod)method).setRequestEntity(new 
StringRequestEntity(str));
+            ((PostMethod) method).setRequestEntity(new 
StringRequestEntity(str));
         }
 
         //Set accepted output response type
@@ -395,7 +394,8 @@
 
         List<CompilationUnit> cUnits = 
testCaseCtx.getTestCase().getCompilationUnit();
         for (CompilationUnit cUnit : cUnits) {
-            LOGGER.info("Starting [TEST]: " + 
testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName() + " ... ");
+            LOGGER.info(
+                    "Starting [TEST]: " + 
testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName() + " ... ");
             testFileCtxs = testCaseCtx.getTestFiles(cUnit);
             expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
             for (TestFileContext ctx : testFileCtxs) {
@@ -410,8 +410,8 @@
                         case "update":
                             //isDmlRecoveryTest: set IP address
                             if (isDmlRecoveryTest && 
statement.contains("nc1://")) {
-                                statement = statement
-                                        .replaceAll("nc1://", 
"127.0.0.1://../../../../../../asterix-app/");
+                                statement = statement.replaceAll("nc1://",
+                                        
"127.0.0.1://../../../../../../asterix-app/");
                             }
 
                             TestsUtils.executeUpdate(statement);
@@ -436,7 +436,9 @@
                                 resultStream = executeAnyAQLAsync(statement, 
true, fmt);
 
                             if (queryCount >= expectedResultFileCtxs.size()) {
-                                throw new IllegalStateException("no result 
file for " + testFile.toString() + "; queryCount: " + queryCount + ", 
filectxs.size: " + expectedResultFileCtxs.size());
+                                throw new IllegalStateException(
+                                        "no result file for " + 
testFile.toString() + "; queryCount: " + queryCount
+                                                + ", filectxs.size: " + 
expectedResultFileCtxs.size());
                             }
                             expectedResultFile = 
expectedResultFileCtxs.get(queryCount).getFile();
 
@@ -484,17 +486,15 @@
                                 e.printStackTrace();
                             }
                             if (!failed) {
-                                throw new Exception("Test \"" + testFile + "\" 
FAILED!\n  An exception"
-                                        + "is expected.");
+                                throw new Exception(
+                                        "Test \"" + testFile + "\" FAILED!\n  
An exception" + "is expected.");
                             }
                             System.err.println("...but that was expected.");
                             break;
                         case "script":
                             try {
-                                String output = executeScript(
-                                        pb,
-                                        
getScriptPath(testFile.getAbsolutePath(), pb.environment().get("SCRIPT_HOME"),
-                                                statement.trim()));
+                                String output = executeScript(pb, 
getScriptPath(testFile.getAbsolutePath(),
+                                        pb.environment().get("SCRIPT_HOME"), 
statement.trim()));
                                 if (output.contains("ERROR")) {
                                     throw new Exception(output);
                                 }
@@ -514,8 +514,8 @@
                                 e.printStackTrace();
                             }
                             if (!failed) {
-                                throw new Exception("Test \"" + testFile + "\" 
FAILED!\n  An exception"
-                                        + "is expected.");
+                                throw new Exception(
+                                        "Test \"" + testFile + "\" FAILED!\n  
An exception" + "is expected.");
                             }
                             System.err.println("...but that was expected.");
                             break;
diff --git a/asterix-installer/pom.xml b/asterix-installer/pom.xml
index 3eb3dfa..e478ee0 100644
--- a/asterix-installer/pom.xml
+++ b/asterix-installer/pom.xml
@@ -264,6 +264,13 @@
                </dependency>
                <dependency>
                        <groupId>org.apache.asterix</groupId>
+                       <artifactId>asterix-app</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>test-jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.asterix</groupId>
                        <artifactId>asterix-server</artifactId>
                        <version>0.8.7-SNAPSHOT</version>
                        <type>zip</type>
diff --git 
a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixClusterLifeCycleIT.java
 
b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixClusterLifeCycleIT.java
index cdf264e..552eb37 100644
--- 
a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixClusterLifeCycleIT.java
+++ 
b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixClusterLifeCycleIT.java
@@ -36,7 +36,6 @@
 import org.junit.runners.Parameterized.Parameters;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
-
 import org.apache.asterix.test.aql.TestsUtils;
 import org.apache.asterix.testframework.context.TestCaseContext;
 
diff --git 
a/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/DmlRecoveryIT.java
 
b/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/DmlRecoveryIT.java
index c6f24b5..0211bed 100644
--- 
a/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/DmlRecoveryIT.java
+++ 
b/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/DmlRecoveryIT.java
@@ -36,7 +36,6 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
-
 import org.apache.asterix.test.aql.TestsUtils;
 import org.apache.asterix.testframework.context.TestCaseContext;
 import org.apache.asterix.testframework.context.TestFileContext;
@@ -50,7 +49,7 @@
     private static final Logger LOGGER = 
Logger.getLogger(RecoveryIT.class.getName());
     private static final String PATH_ACTUAL = "rttest/";
 
-    private static final String TESTSUITE_PATH_BASE = 
"../asterix-app/src/test/resources/runtimets/";
+    public static final String TESTSUITE_PATH_BASE = 
"../asterix-app/src/test/resources/runtimets/";
 
     private TestCaseContext tcCtx;
     private static File asterixInstallerPath;
diff --git 
a/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
 
b/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
index c70b504..dcc2550 100644
--- 
a/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
+++ 
b/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
@@ -32,8 +32,8 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
-
 import org.apache.asterix.test.aql.TestsUtils;
+import org.apache.asterix.test.runtime.HDFSCluster;
 import org.apache.asterix.testframework.context.TestCaseContext;
 
 @RunWith(Parameterized.class)
@@ -42,6 +42,7 @@
     private static final Logger LOGGER = 
Logger.getLogger(RecoveryIT.class.getName());
     private static final String PATH_ACTUAL = "rttest/";
     private static final String PATH_BASE = 
"src/test/resources/transactionts/";
+    private static final String HDFS_BASE = "../asterix-app/";
     private TestCaseContext tcCtx;
     private static File asterixInstallerPath;
     private static File installerTargetPath;
@@ -79,6 +80,8 @@
                 + "configure_and_validate.sh");
         TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"setup_teardown" + File.separator
                 + "stop_and_delete.sh");
+        
+        HDFSCluster.getInstance().setup(HDFS_BASE);
     }
 
     @AfterClass
@@ -92,6 +95,7 @@
                 + "stop_and_delete.sh");
         TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"setup_teardown" + File.separator
                 + "shutdown.sh");
+        HDFSCluster.getInstance().cleanup();
     }
 
     @Parameters
@@ -112,5 +116,4 @@
     public void test() throws Exception {
         TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb, false);
     }
-
 }
diff --git 
a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.1.script.aql
 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git 
a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
new file mode 100644
index 0000000..bab64a1
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Create an external dataset that contains records stored with 
text hdfs file format.
+                 Build an index over the external dataset age attribute
+                 Perform a query over the dataset using the index.
+* Expected Res : Success
+* Date         : 3rd Jan 2014
+*/
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type EmployeeType as closed {
+ id: int64,
+ name: string,
+ age: int64
+};
+
+create external dataset EmployeeDataset(EmployeeType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/external-indexing-test.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="|"));
+
+create index EmployeeAgeIdx on EmployeeDataset(age);
diff --git 
a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.3.script.aql
 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.3.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.3.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git 
a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.4.query.aql
 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.4.query.aql
new file mode 100644
index 0000000..61107f0
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.4.query.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Create an external dataset that contains records stored with 
text hdfs file format.
+                 Build an index over the external dataset age attribute
+                 Perform a query over the dataset using the index.
+* Expected Res : Success
+* Date         : 3rd Jan 2014
+*/
+use dataverse test;
+
+for $emp in dataset EmployeeDataset
+where $emp.age = 22
+order by $emp.id
+return $emp;
diff --git 
a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.5.script.aql
 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.5.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.5.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git 
a/asterix-installer/src/test/resources/transactionts/results/query_after_restart/external_index/external_index.1.adm
 
b/asterix-installer/src/test/resources/transactionts/results/query_after_restart/external_index/external_index.1.adm
new file mode 100644
index 0000000..6aca3d1
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/results/query_after_restart/external_index/external_index.1.adm
@@ -0,0 +1,3 @@
+[ { "id": 3, "name": "Samuel", "age": 22 }
+, { "id": 10, "name": "David", "age": 22 }
+ ]
diff --git 
a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/create_and_start.sh
 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c 
$MANAGIX_HOME/clusters/local/local.xml;
diff --git 
a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_delete.sh
 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git 
a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_start.sh
 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ 
b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/external_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/testsuite.xml 
b/asterix-installer/src/test/resources/transactionts/testsuite.xml
index cf95132..683d90d 100644
--- a/asterix-installer/src/test/resources/transactionts/testsuite.xml
+++ b/asterix-installer/src/test/resources/transactionts/testsuite.xml
@@ -18,6 +18,13 @@
  !-->
 <test-suite xmlns="urn:xml.testframework.asterix.apache.org" 
ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
 
+<test-group name="query_after_restart">
+       <test-case FilePath="query_after_restart">
+      <compilation-unit name="external_index">
+        <output-dir compare="Text">external_index</output-dir>
+      </compilation-unit>
+    </test-case>
+</test-group>
   <test-group name="recover_after_abort">
 
     <test-case FilePath="recover_after_abort">
diff --git 
a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
 
b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index 2478dc7..18f6fe8 100644
--- 
a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ 
b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@ -348,8 +348,8 @@
                     // querying an external dataset
                     Dataset dataset = ((DatasetDataSource) 
dataSource).getDataset();
                     String itemTypeName = dataset.getItemTypeName();
-                    IAType itemType = 
MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(),
-                            itemTypeName).getDatatype();
+                    IAType itemType = MetadataManager.INSTANCE
+                            .getDatatype(mdTxnCtx, dataset.getDataverseName(), 
itemTypeName).getDatatype();
                     ExternalDatasetDetails edd = (ExternalDatasetDetails) 
dataset.getDatasetDetails();
                     IAdapterFactory adapterFactory = 
getConfiguredAdapterFactory(dataset, edd.getAdapter(),
                             edd.getProperties(), itemType, false, null);
@@ -394,8 +394,8 @@
                     .getSerializerDeserializer(feedOutputType);
             RecordDescriptor feedDesc = new RecordDescriptor(new 
ISerializerDeserializer[] { payloadSerde });
 
-            FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) 
dataSource).getProperties().get(
-                    BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+            FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) 
dataSource).getProperties()
+                    .get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
             if (feedPolicy == null) {
                 throw new AlgebricksException("Feed not configured with a 
policy");
             }
@@ -434,8 +434,8 @@
                                 if 
(activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
                                         && activity.getFeedName()
                                                 
.equals(feedDataSource.getSourceFeedId().getFeedName())) {
-                                    locations = 
activity.getFeedActivityDetails().get(
-                                            
FeedActivityDetails.COMPUTE_LOCATIONS);
+                                    locations = 
activity.getFeedActivityDetails()
+                                            
.get(FeedActivityDetails.COMPUTE_LOCATIONS);
                                     locationArray = locations.split(",");
                                     break;
                                 }
@@ -489,8 +489,8 @@
     private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> 
buildLoadableDatasetScan(JobSpecification jobSpec,
             LoadableDataSource alds, IAdapterFactory adapterFactory, 
RecordDescriptor rDesc, boolean isPKAutoGenerated,
             List<List<String>> primaryKeys, ARecordType recType, int pkIndex) 
throws AlgebricksException {
-        if 
(!(adapterFactory.getSupportedOperations().equals(SupportedOperation.READ) || 
adapterFactory
-                
.getSupportedOperations().equals(SupportedOperation.READ_WRITE))) {
+        if 
(!(adapterFactory.getSupportedOperations().equals(SupportedOperation.READ)
+                || 
adapterFactory.getSupportedOperations().equals(SupportedOperation.READ_WRITE))) 
{
             throw new AlgebricksException(" External dataset adapter does not 
support read operation");
         }
         ExternalDataScanOperatorDescriptor dataScanner = new 
ExternalDataScanOperatorDescriptor(jobSpec, rDesc,
@@ -596,13 +596,13 @@
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> 
buildExternalDatasetDataScannerRuntime(
             JobSpecification jobSpec, IAType itemType, IAdapterFactory 
adapterFactory, IDataFormat format)
-            throws AlgebricksException {
+                    throws AlgebricksException {
         if (itemType.getTypeTag() != ATypeTag.RECORD) {
             throw new AlgebricksException("Can only scan datasets of 
records.");
         }
 
-        if 
(!(adapterFactory.getSupportedOperations().equals(SupportedOperation.READ) || 
adapterFactory
-                
.getSupportedOperations().equals(SupportedOperation.READ_WRITE))) {
+        if 
(!(adapterFactory.getSupportedOperations().equals(SupportedOperation.READ)
+                || 
adapterFactory.getSupportedOperations().equals(SupportedOperation.READ_WRITE))) 
{
             throw new AlgebricksException(" External dataset adapter does not 
support read operation");
         }
 
@@ -658,8 +658,8 @@
             case EXTERNAL:
                 String libraryName = primaryFeed.getAdaptorName().trim()
                         
.split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
-                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, 
primaryFeed, libraryName, adapterFactory
-                        .getClass().getName(), factoryOutput.second, 
policyAccessor);
+                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, 
primaryFeed, libraryName,
+                        adapterFactory.getClass().getName(), 
factoryOutput.second, policyAccessor);
                 break;
         }
 
@@ -690,11 +690,11 @@
             IBinaryComparatorFactory[] comparatorFactories;
 
             String itemTypeName = dataset.getItemTypeName();
-            ARecordType itemType = (ARecordType) 
MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
-                    dataset.getDataverseName(), itemTypeName).getDatatype();
+            ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
+                    .getDatatype(mdTxnCtx, dataset.getDataverseName(), 
itemTypeName).getDatatype();
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, itemType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    itemType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = null;
             int[] btreeFields = null;
 
@@ -708,7 +708,8 @@
                 }
                 Pair<IBinaryComparatorFactory[], ITypeTraits[]> 
comparatorFactoriesAndTypeTraits = 
getComparatorFactoriesAndTypeTraitsOfSecondaryBTreeIndex(
                         secondaryIndex.getIndexType(), 
secondaryIndex.getKeyFieldNames(),
-                        secondaryIndex.getKeyFieldTypes(), 
DatasetUtils.getPartitioningKeys(dataset), itemType);
+                        secondaryIndex.getKeyFieldTypes(), 
DatasetUtils.getPartitioningKeys(dataset), itemType,
+                        dataset.getDatasetType());
                 comparatorFactories = comparatorFactoriesAndTypeTraits.first;
                 typeTraits = comparatorFactoriesAndTypeTraits.second;
                 if (filterTypeTraits != null) {
@@ -767,33 +768,34 @@
                                     txnSubsystemProvider, 
ResourceType.LSM_BTREE);
                 }
             }
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
             AsterixRuntimeComponentsProvider rtcProvider = 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER;
             BTreeSearchOperatorDescriptor btreeSearchOp;
             if (dataset.getDatasetType() == DatasetType.INTERNAL) {
                 btreeSearchOp = new BTreeSearchOperatorDescriptor(jobSpec, 
outputRecDesc,
                         appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
                         spPc.first, typeTraits, comparatorFactories, 
bloomFilterKeyFields, lowKeyFields, highKeyFields,
-                        lowKeyInclusive, highKeyInclusive, new 
LSMBTreeDataflowHelperFactory(
-                                new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
-                                compactionInfo.second, isSecondary ? new 
SecondaryIndexOperationTrackerProvider(
-                                        dataset.getDatasetId()) : new 
PrimaryIndexOperationTrackerProvider(
-                                        dataset.getDatasetId()), rtcProvider,
-                                LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        lowKeyInclusive, highKeyInclusive,
+                        new LSMBTreeDataflowHelperFactory(new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                                compactionInfo.first, compactionInfo.second,
+                                isSecondary ? new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId())
+                                        : new 
PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                                rtcProvider, 
LSMBTreeIOOperationCallbackFactory.INSTANCE,
                                 
storageProperties.getBloomFilterFalsePositiveRate(), !isSecondary, 
filterTypeTraits,
-                                filterCmpFactories, btreeFields, filterFields, 
!temp), retainInput, retainNull,
-                        context.getNullWriterFactory(), searchCallbackFactory, 
minFilterFieldIndexes,
-                        maxFilterFieldIndexes);
+                                filterCmpFactories, btreeFields, filterFields, 
!temp),
+                        retainInput, retainNull, 
context.getNullWriterFactory(), searchCallbackFactory,
+                        minFilterFieldIndexes, maxFilterFieldIndexes);
             } else {
                 // External dataset <- use the btree with buddy btree->
                 // Be Careful of Key Start Index ?
                 int[] buddyBreeFields = new int[] { numSecondaryKeys };
                 ExternalBTreeWithBuddyDataflowHelperFactory 
indexDataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
-                        compactionInfo.first, compactionInfo.second, new 
SecondaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, 
getStorageProperties()
-                                .getBloomFilterFalsePositiveRate(), 
buddyBreeFields,
+                        compactionInfo.first, compactionInfo.second,
+                        new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                        
getStorageProperties().getBloomFilterFalsePositiveRate(), buddyBreeFields,
                         
ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this), 
!temp);
                 btreeSearchOp = new 
ExternalBTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, rtcProvider,
                         rtcProvider, spPc.first, typeTraits, 
comparatorFactories, bloomFilterKeyFields, lowKeyFields,
@@ -810,7 +812,7 @@
 
     private Pair<IBinaryComparatorFactory[], ITypeTraits[]> 
getComparatorFactoriesAndTypeTraitsOfSecondaryBTreeIndex(
             IndexType indexType, List<List<String>> sidxKeyFieldNames, 
List<IAType> sidxKeyFieldTypes,
-            List<List<String>> pidxKeyFieldNames, ARecordType recType) throws 
AlgebricksException {
+            List<List<String>> pidxKeyFieldNames, ARecordType recType, 
DatasetType dsType) throws AlgebricksException {
 
         IBinaryComparatorFactory[] comparatorFactories;
         ITypeTraits[] typeTraits;
@@ -832,8 +834,12 @@
         for (int j = 0; j < pidxKeyFieldCount; ++j, ++i) {
             IAType keyType = null;
             try {
-                keyType = recType.getSubFieldType(pidxKeyFieldNames.get(j));
-            } catch (IOException e) {
+                if (dsType == DatasetType.INTERNAL) {
+                    keyType = 
recType.getSubFieldType(pidxKeyFieldNames.get(j));
+                } else {
+                    keyType = IndexingConstants.getFieldType(j);
+                }
+            } catch (IOException | AsterixException e) {
                 throw new AlgebricksException(e);
             }
             comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
@@ -857,17 +863,15 @@
             Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, 
dataset.getDataverseName(),
                     dataset.getDatasetName(), indexName);
             if (secondaryIndex == null) {
-                throw new AlgebricksException("Code generation error: no index 
" + indexName + " for dataset "
-                        + dataset.getDatasetName());
+                throw new AlgebricksException(
+                        "Code generation error: no index " + indexName + " for 
dataset " + dataset.getDatasetName());
             }
             List<List<String>> secondaryKeyFields = 
secondaryIndex.getKeyFieldNames();
             List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
             int numSecondaryKeys = secondaryKeyFields.size();
             if (numSecondaryKeys != 1) {
-                throw new AlgebricksException(
-                        "Cannot use "
-                                + numSecondaryKeys
-                                + " fields as a key for the R-tree index. 
There can be only one field as a key for the R-tree index.");
+                throw new AlgebricksException("Cannot use " + numSecondaryKeys
+                        + " fields as a key for the R-tree index. There can be 
only one field as a key for the R-tree index.");
             }
             Pair<IAType, Boolean> keyTypePair = 
Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
                     secondaryKeyFields.get(0), recType);
@@ -904,8 +908,8 @@
             }
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, recType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    recType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = null;
             int[] rtreeFields = null;
             if (filterTypeTraits != null) {
@@ -918,8 +922,8 @@
             }
 
             IAType nestedKeyType = 
NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
             ISearchOperationCallbackFactory searchCallbackFactory = temp ? 
NoOpOperationCallbackFactory.INSTANCE
                     : new SecondaryIndexSearchOperationCallbackFactory();
 
@@ -927,17 +931,19 @@
             if (dataset.getDatasetType() == DatasetType.INTERNAL) {
                 rtreeSearchOp = new RTreeSearchOperatorDescriptor(jobSpec, 
outputRecDesc,
                         appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
-                        spPc.first, typeTraits, comparatorFactories, 
keyFields, new LSMRTreeDataflowHelperFactory(
-                                valueProviderFactories, RTreePolicyType.RTREE, 
primaryComparatorFactories,
+                        spPc.first, typeTraits, comparatorFactories, keyFields,
+                        new 
LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                                primaryComparatorFactories,
                                 new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
-                                compactionInfo.second, new 
SecondaryIndexOperationTrackerProvider(
-                                        dataset.getDatasetId()), 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                LSMRTreeIOOperationCallbackFactory.INSTANCE, 
proposeLinearizer(
-                                        nestedKeyType.getTypeTag(), 
comparatorFactories.length),
+                                compactionInfo.second,
+                                new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                                
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                                LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                                proposeLinearizer(nestedKeyType.getTypeTag(), 
comparatorFactories.length),
                                 
storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
-                                filterTypeTraits, filterCmpFactories, 
filterFields, !temp), retainInput, retainNull,
-                        context.getNullWriterFactory(), searchCallbackFactory, 
minFilterFieldIndexes,
-                        maxFilterFieldIndexes);
+                                filterTypeTraits, filterCmpFactories, 
filterFields, !temp),
+                        retainInput, retainNull, 
context.getNullWriterFactory(), searchCallbackFactory,
+                        minFilterFieldIndexes, maxFilterFieldIndexes);
 
             } else {
                 // External Dataset
@@ -953,8 +959,8 @@
                 // Create the operator
                 rtreeSearchOp = new 
ExternalRTreeSearchOperatorDescriptor(jobSpec, outputRecDesc,
                         appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
-                        spPc.first, typeTraits, comparatorFactories, 
keyFields, indexDataflowHelperFactory,
-                        retainInput, retainNull, 
context.getNullWriterFactory(), searchCallbackFactory);
+                        spPc.first, typeTraits, comparatorFactories, 
keyFields, indexDataflowHelperFactory, retainInput,
+                        retainNull, context.getNullWriterFactory(), 
searchCallbackFactory);
             }
 
             return new Pair<IOperatorDescriptor, 
AlgebricksPartitionConstraint>(rtreeSearchOp, spPc.second);
@@ -1033,8 +1039,8 @@
         }
         String tName = dataset.getItemTypeName();
         IAType itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, 
aqlId.getDataverseName(), tName).getDatatype();
-        AqlDataSourceType datasourceType = 
dataset.getDatasetType().equals(DatasetType.EXTERNAL) ? 
AqlDataSourceType.EXTERNAL_DATASET
-                : AqlDataSourceType.INTERNAL_DATASET;
+        AqlDataSourceType datasourceType = 
dataset.getDatasetType().equals(DatasetType.EXTERNAL)
+                ? AqlDataSourceType.EXTERNAL_DATASET : 
AqlDataSourceType.INTERNAL_DATASET;
         return new DatasetDataSource(aqlId, aqlId.getDataverseName(), 
aqlId.getDatasourceName(), itemType,
                 datasourceType);
     }
@@ -1101,8 +1107,8 @@
             String indexName = primaryIndex.getIndexName();
 
             String itemTypeName = dataset.getItemTypeName();
-            ARecordType itemType = (ARecordType) 
MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
-                    dataset.getDataverseName(), itemTypeName).getDatatype();
+            ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
+                    .getDatatype(mdTxnCtx, dataset.getDataverseName(), 
itemTypeName).getDatatype();
             ITypeTraits[] typeTraits = 
DatasetUtils.computeTupleTypeTraits(dataset, itemType);
             IBinaryComparatorFactory[] comparatorFactories = 
DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                     itemType, context.getBinaryComparatorFactoryProvider());
@@ -1114,8 +1120,8 @@
             long numElementsHint = getCardinalityPerPartitionHint(dataset);
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, itemType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    itemType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = DatasetUtils.createFilterFields(dataset);
             int[] btreeFields = 
DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
 
@@ -1124,15 +1130,16 @@
             // right callback
             // (ex. what's the expected behavior when there is an error during
             // bulkload?)
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
             TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new 
TreeIndexBulkLoadOperatorDescriptor(spec, null,
                     appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
                     splitsAndConstraint.first, typeTraits, 
comparatorFactories, bloomFilterKeyFields, fieldPermutation,
                     GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, 
numElementsHint, true,
                     new LSMBTreeDataflowHelperFactory(new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                            compactionInfo.first, compactionInfo.second, new 
PrimaryIndexOperationTrackerProvider(
-                                    dataset.getDatasetId()), 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            compactionInfo.first, compactionInfo.second,
+                            new 
PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                             LSMBTreeIOOperationCallbackFactory.INSTANCE,
                             
storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
                             filterCmpFactories, btreeFields, filterFields, 
!temp));
@@ -1147,13 +1154,13 @@
             IDataSource<AqlSourceId> dataSource, IOperatorSchema 
propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, 
List<LogicalVariable> additionalNonKeyFields,
             RecordDescriptor recordDesc, JobGenContext context, 
JobSpecification spec, boolean bulkload)
-            throws AlgebricksException {
+                    throws AlgebricksException {
 
         String datasetName = dataSource.getId().getDatasourceName();
         Dataset dataset = findDataset(dataSource.getId().getDataverseName(), 
datasetName);
         if (dataset == null) {
-            throw new AlgebricksException("Unknown dataset " + datasetName + " 
in dataverse "
-                    + dataSource.getId().getDataverseName());
+            throw new AlgebricksException(
+                    "Unknown dataset " + datasetName + " in dataverse " + 
dataSource.getId().getDataverseName());
         }
         boolean temp = dataset.getDatasetDetails().isTemp();
         isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
@@ -1182,8 +1189,8 @@
             String indexName = primaryIndex.getIndexName();
 
             String itemTypeName = dataset.getItemTypeName();
-            ARecordType itemType = (ARecordType) 
MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
-                    dataSource.getId().getDataverseName(), 
itemTypeName).getDatatype();
+            ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
+                    .getDatatype(mdTxnCtx, 
dataSource.getId().getDataverseName(), itemTypeName).getDatatype();
 
             ITypeTraits[] typeTraits = 
DatasetUtils.computeTupleTypeTraits(dataset, itemType);
 
@@ -1202,24 +1209,26 @@
             }
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, itemType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    itemType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = DatasetUtils.createFilterFields(dataset);
             int[] btreeFields = 
DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
 
             TransactionSubsystemProvider txnSubsystemProvider = new 
TransactionSubsystemProvider();
-            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp ? new TempDatasetPrimaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, primaryKeyFields, txnSubsystemProvider, 
indexOp, ResourceType.LSM_BTREE)
+            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp
+                    ? new 
TempDatasetPrimaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            primaryKeyFields, txnSubsystemProvider, indexOp, 
ResourceType.LSM_BTREE)
                     : new 
PrimaryIndexModificationOperationCallbackFactory(jobId, datasetId, 
primaryKeyFields,
                             txnSubsystemProvider, indexOp, 
ResourceType.LSM_BTREE);
 
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
-            IIndexDataflowHelperFactory idfh = new 
LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    datasetId), compactionInfo.first, compactionInfo.second, 
new PrimaryIndexOperationTrackerProvider(
-                    dataset.getDatasetId()), 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMBTreeIOOperationCallbackFactory.INSTANCE, 
storageProperties.getBloomFilterFalsePositiveRate(),
-                    true, filterTypeTraits, filterCmpFactories, btreeFields, 
filterFields, !temp);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory idfh = new 
LSMBTreeDataflowHelperFactory(
+                    new AsterixVirtualBufferCacheProvider(datasetId), 
compactionInfo.first, compactionInfo.second,
+                    new 
PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, 
LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), true, 
filterTypeTraits, filterCmpFactories,
+                    btreeFields, filterFields, !temp);
             IOperatorDescriptor op;
             if (bulkload) {
                 long numElementsHint = getCardinalityPerPartitionHint(dataset);
@@ -1245,7 +1254,7 @@
             IDataSource<AqlSourceId> dataSource, IOperatorSchema 
propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, 
List<LogicalVariable> additionalNonKeyFields,
             RecordDescriptor recordDesc, JobGenContext context, 
JobSpecification spec, boolean bulkload)
-            throws AlgebricksException {
+                    throws AlgebricksException {
         return getInsertOrDeleteRuntime(IndexOperation.INSERT, dataSource, 
propagatedSchema, typeEnv, keys, payload,
                 additionalNonKeyFields, recordDesc, context, spec, bulkload);
     }
@@ -1283,14 +1292,14 @@
         AsterixTupleFilterFactory filterFactory = 
createTupleFilterFactory(inputSchemas, typeEnv, filterExpr, context);
         switch (secondaryIndex.getIndexType()) {
             case BTREE: {
-                return getBTreeDmlRuntime(dataverseName, datasetName, 
indexName, propagatedSchema, typeEnv,
-                        primaryKeys, secondaryKeys, additionalNonKeyFields, 
filterFactory, recordDesc, context, spec,
-                        indexOp, bulkload);
+                return getBTreeDmlRuntime(dataverseName, datasetName, 
indexName, propagatedSchema, typeEnv, primaryKeys,
+                        secondaryKeys, additionalNonKeyFields, filterFactory, 
recordDesc, context, spec, indexOp,
+                        bulkload);
             }
             case RTREE: {
-                return getRTreeDmlRuntime(dataverseName, datasetName, 
indexName, propagatedSchema, typeEnv,
-                        primaryKeys, secondaryKeys, additionalNonKeyFields, 
filterFactory, recordDesc, context, spec,
-                        indexOp, bulkload);
+                return getRTreeDmlRuntime(dataverseName, datasetName, 
indexName, propagatedSchema, typeEnv, primaryKeys,
+                        secondaryKeys, additionalNonKeyFields, filterFactory, 
recordDesc, context, spec, indexOp,
+                        bulkload);
             }
             case SINGLE_PARTITION_WORD_INVIX:
             case SINGLE_PARTITION_NGRAM_INVIX:
@@ -1301,8 +1310,8 @@
                         indexOp, secondaryIndex.getIndexType(), bulkload);
             }
             default: {
-                throw new AlgebricksException("Insert and delete not 
implemented for index type: "
-                        + secondaryIndex.getIndexType());
+                throw new AlgebricksException(
+                        "Insert and delete not implemented for index type: " + 
secondaryIndex.getIndexType());
             }
         }
     }
@@ -1373,7 +1382,7 @@
             IVariableTypeEnvironment typeEnv, List<LogicalVariable> 
primaryKeys, List<LogicalVariable> secondaryKeys,
             AsterixTupleFilterFactory filterFactory, RecordDescriptor 
recordDesc, JobGenContext context,
             JobSpecification spec, IndexOperation indexOp, IndexType 
indexType, boolean bulkload)
-            throws AlgebricksException {
+                    throws AlgebricksException {
 
         // Sanity checks.
         if (primaryKeys.size() > 1) {
@@ -1548,7 +1557,8 @@
 
             tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec, 
tokenKeyPairRecDesc, tokenizerFactory, docField,
                     keyFields, isPartitioned, true);
-            return new Pair<IOperatorDescriptor, 
AlgebricksPartitionConstraint>(tokenizerOp, splitsAndConstraint.second);
+            return new Pair<IOperatorDescriptor, 
AlgebricksPartitionConstraint>(tokenizerOp,
+                    splitsAndConstraint.second);
 
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
@@ -1563,7 +1573,7 @@
             IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, 
List<LogicalVariable> primaryKeys,
             List<LogicalVariable> secondaryKeys, List<LogicalVariable> 
additionalNonKeyFields,
             ILogicalExpression filterExpr, RecordDescriptor recordDesc, 
JobGenContext context, JobSpecification spec)
-            throws AlgebricksException {
+                    throws AlgebricksException {
         return getIndexInsertOrDeleteRuntime(IndexOperation.DELETE, 
dataSourceIndex, propagatedSchema, inputSchemas,
                 typeEnv, primaryKeys, secondaryKeys, additionalNonKeyFields, 
filterExpr, recordDesc, context, spec,
                 false);
@@ -1571,7 +1581,7 @@
 
     private AsterixTupleFilterFactory 
createTupleFilterFactory(IOperatorSchema[] inputSchemas,
             IVariableTypeEnvironment typeEnv, ILogicalExpression filterExpr, 
JobGenContext context)
-            throws AlgebricksException {
+                    throws AlgebricksException {
         // No filtering condition.
         if (filterExpr == null) {
             return null;
@@ -1640,8 +1650,8 @@
                     dataset.getDatasetName(), indexName);
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, recType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    recType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = null;
             int[] btreeFields = null;
             if (filterTypeTraits != null) {
@@ -1661,15 +1671,15 @@
                 Pair<IAType, Boolean> keyPairType = 
Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(i),
                         secondaryKeyNames.get(i), recType);
                 IAType keyType = keyPairType.first;
-                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                        keyType, true);
+                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
+                        true);
                 typeTraits[i] = 
AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
             }
             List<List<String>> partitioningKeys = 
DatasetUtils.getPartitioningKeys(dataset);
             for (List<String> partitioningKey : partitioningKeys) {
                 IAType keyType = recType.getSubFieldType(partitioningKey);
-                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                        keyType, true);
+                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
+                        true);
                 typeTraits[i] = 
AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
                 ++i;
             }
@@ -1682,15 +1692,17 @@
             JobId jobId = ((JobEventListenerFactory) 
spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
             TransactionSubsystemProvider txnSubsystemProvider = new 
TransactionSubsystemProvider();
-            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
-                    ResourceType.LSM_BTREE) : new 
SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
-                    modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
+            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp
+                    ? new 
TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE)
+                    : new 
SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
+                            ResourceType.LSM_BTREE);
 
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
-            IIndexDataflowHelperFactory idfh = new 
LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    datasetId), compactionInfo.first, compactionInfo.second,
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory idfh = new 
LSMBTreeDataflowHelperFactory(
+                    new AsterixVirtualBufferCacheProvider(datasetId), 
compactionInfo.first, compactionInfo.second,
                     new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, 
LSMBTreeIOOperationCallbackFactory.INSTANCE,
                     storageProperties.getBloomFilterFalsePositiveRate(), 
false, filterTypeTraits, filterCmpFactories,
@@ -1706,14 +1718,15 @@
                 op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, 
recordDesc,
                         appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
                         splitsAndConstraint.first, typeTraits, 
comparatorFactories, bloomFilterKeyFields,
-                        fieldPermutation, indexOp, new 
LSMBTreeDataflowHelperFactory(
-                                new 
AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first,
-                                compactionInfo.second, new 
SecondaryIndexOperationTrackerProvider(
-                                        dataset.getDatasetId()), 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        fieldPermutation, indexOp,
+                        new LSMBTreeDataflowHelperFactory(new 
AsterixVirtualBufferCacheProvider(datasetId),
+                                compactionInfo.first, compactionInfo.second,
+                                new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                                
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                                 LSMBTreeIOOperationCallbackFactory.INSTANCE,
                                 
storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
-                                filterCmpFactories, btreeFields, filterFields, 
!temp), filterFactory,
-                        modificationCallbackFactory, false, indexName);
+                                filterCmpFactories, btreeFields, filterFields, 
!temp),
+                        filterFactory, modificationCallbackFactory, false, 
indexName);
             }
             return new Pair<IOperatorDescriptor, 
AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
         } catch (MetadataException e) {
@@ -1852,8 +1865,8 @@
                     secondaryKeyType.getTypeTag(), indexType, 
secondaryIndex.getGramLength());
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, recType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    recType, context.getBinaryComparatorFactoryProvider());
 
             int[] filterFields = null;
             int[] invertedIndexFields = null;
@@ -1883,18 +1896,20 @@
             JobId jobId = ((JobEventListenerFactory) 
spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
             TransactionSubsystemProvider txnSubsystemProvider = new 
TransactionSubsystemProvider();
-            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
-                    ResourceType.LSM_INVERTED_INDEX) : new 
SecondaryIndexModificationOperationCallbackFactory(jobId,
-                    datasetId, modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
-                    ResourceType.LSM_INVERTED_INDEX);
+            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp
+                    ? new 
TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
+                            ResourceType.LSM_INVERTED_INDEX)
+                    : new 
SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
+                            ResourceType.LSM_INVERTED_INDEX);
 
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
             IIndexDataflowHelperFactory indexDataFlowFactory;
             if (!isPartitioned) {
-                indexDataFlowFactory = new 
LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        datasetId), compactionInfo.first, 
compactionInfo.second,
+                indexDataFlowFactory = new 
LSMInvertedIndexDataflowHelperFactory(
+                        new AsterixVirtualBufferCacheProvider(datasetId), 
compactionInfo.first, compactionInfo.second,
                         new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                         AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                         LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
@@ -1991,8 +2006,8 @@
             IAType nestedKeyType = 
NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
             IPrimitiveValueProviderFactory[] valueProviderFactories = new 
IPrimitiveValueProviderFactory[numSecondaryKeys];
             for (i = 0; i < numSecondaryKeys; i++) {
-                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                        nestedKeyType, true);
+                comparatorFactories[i] = 
AqlBinaryComparatorFactoryProvider.INSTANCE
+                        .getBinaryComparatorFactory(nestedKeyType, true);
                 typeTraits[i] = 
AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
                 valueProviderFactories[i] = 
AqlPrimitiveValueProviderFactory.INSTANCE;
             }
@@ -2014,8 +2029,8 @@
             }
 
             ITypeTraits[] filterTypeTraits = 
DatasetUtils.computeFilterTypeTraits(dataset, recType);
-            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recType, 
context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = 
DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    recType, context.getBinaryComparatorFactoryProvider());
             int[] filterFields = null;
             int[] rtreeFields = null;
             if (filterTypeTraits != null) {
@@ -2031,17 +2046,19 @@
             JobId jobId = ((JobEventListenerFactory) 
spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
             TransactionSubsystemProvider txnSubsystemProvider = new 
TransactionSubsystemProvider();
-            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
-                    ResourceType.LSM_RTREE) : new 
SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
-                    modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
+            IModificationOperationCallbackFactory modificationCallbackFactory 
= temp
+                    ? new 
TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE)
+                    : new 
SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+                            modificationCallbackPrimaryKeyFields, 
txnSubsystemProvider, indexOp,
+                            ResourceType.LSM_RTREE);
 
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils.getMergePolicyFactory(
-                    dataset, mdTxnCtx);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = 
DatasetUtils
+                    .getMergePolicyFactory(dataset, mdTxnCtx);
             IIndexDataflowHelperFactory idfh = new 
LSMRTreeDataflowHelperFactory(valueProviderFactories,
-                    RTreePolicyType.RTREE, primaryComparatorFactories, new 
AsterixVirtualBufferCacheProvider(
-                            dataset.getDatasetId()), compactionInfo.first, 
compactionInfo.second,
-                    new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    RTreePolicyType.RTREE, primaryComparatorFactories,
+                    new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+                    compactionInfo.second, new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                     AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, 
LSMRTreeIOOperationCallbackFactory.INSTANCE,
                     proposeLinearizer(nestedKeyType.getTypeTag(), 
comparatorFactories.length),
                     storageProperties.getBloomFilterFalsePositiveRate(), 
rtreeFields, btreeFields, filterTypeTraits,
@@ -2058,15 +2075,16 @@
                         appContext.getStorageManagerInterface(), 
appContext.getIndexLifecycleManagerProvider(),
                         splitsAndConstraint.first, typeTraits, 
comparatorFactories, null, fieldPermutation, indexOp,
                         new 
LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                                primaryComparatorFactories, new 
AsterixVirtualBufferCacheProvider(dataset
-                                        .getDatasetId()), 
compactionInfo.first, compactionInfo.second,
+                                primaryComparatorFactories,
+                                new 
AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+                                compactionInfo.second,
                                 new 
SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                                 
AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                LSMRTreeIOOperationCallbackFactory.INSTANCE, 
proposeLinearizer(
-                                        nestedKeyType.getTypeTag(), 
comparatorFactories.length), storageProperties
-                                        .getBloomFilterFalsePositiveRate(), 
rtreeFields, btreeFields, filterTypeTraits,
-                                filterCmpFactories, filterFields, !temp), 
filterFactory,
-                        modificationCallbackFactory, false, indexName);
+                                LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                                proposeLinearizer(nestedKeyType.getTypeTag(), 
comparatorFactories.length),
+                                
storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
+                                filterTypeTraits, filterCmpFactories, 
filterFields, !temp),
+                        filterFactory, modificationCallbackFactory, false, 
indexName);
             }
             return new Pair<IOperatorDescriptor, 
AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
         } catch (MetadataException | IOException e) {
@@ -2255,8 +2273,8 @@
         try {
             type = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverse, 
typeName);
         } catch (MetadataException e) {
-            throw new AlgebricksException("Metadata exception while looking up 
type '" + typeName + "' in dataverse '"
-                    + dataverse + "'", e);
+            throw new AlgebricksException(
+                    "Metadata exception while looking up type '" + typeName + 
"' in dataverse '" + dataverse + "'", e);
         }
         if (type == null) {
             throw new AlgebricksException("Type name '" + typeName + "' 
unknown in dataverse '" + dataverse + "'");
@@ -2366,16 +2384,16 @@
                     String[] ioDevices = 
AsterixClusterProperties.INSTANCE.getIODevices(nd);
                     if (create) {
                         for (int j = 0; j < nodeStores.length; j++) {
-                            File f = new File(ioDevices[0] + File.separator + 
nodeStores[j] + File.separator
-                                    + relPathFile);
+                            File f = new File(
+                                    ioDevices[0] + File.separator + 
nodeStores[j] + File.separator + relPathFile);
                             splitArray.add(new FileSplit(nd, new 
FileReference(f), 0));
                         }
                     } else {
                         int numIODevices = 
AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nd);
                         for (int j = 0; j < nodeStores.length; j++) {
                             for (int k = 0; k < numIODevices; k++) {
-                                File f = new File(ioDevices[0] + 
File.separator + nodeStores[j] + File.separator
-                                        + relPathFile);
+                                File f = new File(
+                                        ioDevices[0] + File.separator + 
nodeStores[j] + File.separator + relPathFile);
                                 splitArray.add(new FileSplit(nd, new 
FileReference(f), 0));
                             }
                         }

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/455
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I71d924d7e2b7a7e6c752bc97679e612946afc17c
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: abdullah alamoudi <[email protected]>

Reply via email to