abdullah alamoudi has uploaded a new change for review.

  https://asterix-gerrit.ics.uci.edu/988

Change subject: ASTERIXDB-1269 Fix Extraction of Query Segment and Refactor AQL Parser
......................................................................

ASTERIXDB-1269 Fix Extraction of Query Segment and Refactor AQL Parser

This change fixes issue ASTERIXDB-1269 and refactors aql.jj,
removing quoted strings throughout the file and creating token
definitions for them instead.

Change-Id: I18ac4f8d86b3c5c7bfe226c98114499671649e93
---
M asterixdb/asterix-app/pom.xml
A asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.1.ddl.aql
A asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.2.query.aql
A asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.3.ddl.aql
A asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/single-line-definition/single-line-definition.1.adm
M asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
M asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
M asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
8 files changed, 253 insertions(+), 74 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb refs/changes/88/988/1

diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index 43adabf..7d0a9ba 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -145,6 +145,35 @@
         </executions>
       </plugin>
     </plugins>
+    <pluginManagement>
+        <plugins>
+            <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+            <plugin>
+                <groupId>org.eclipse.m2e</groupId>
+                <artifactId>lifecycle-mapping</artifactId>
+                <version>1.0.0</version>
+                <configuration>
+                    <lifecycleMappingMetadata>
+                        <pluginExecutions>
+                            <pluginExecution>
+                                <pluginExecutionFilter>
+                                    <groupId>org.apache.asterix</groupId>
+                                    <artifactId>asterix-test-datagenerator-maven-plugin</artifactId>
+                                    <versionRange>[0.8.9-SNAPSHOT,)</versionRange>
+                                    <goals>
+                                        <goal>generate-testdata</goal>
+                                    </goals>
+                                </pluginExecutionFilter>
+                                <action>
+                                    <ignore></ignore>
+                                </action>
+                            </pluginExecution>
+                        </pluginExecutions>
+                    </lifecycleMappingMetadata>
+                </configuration>
+            </plugin>
+        </plugins>
+    </pluginManagement>
   </build>
   <dependencies>
     <dependency>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.1.ddl.aql
new file mode 100644
index 0000000..9fdd9be
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.1.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a function definition in a single line
+ * See: https://issues.apache.org/jira/browse/ASTERIXDB-1269
+ * Expected Res : Success
+ * Date         : Jul 10th 2016
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create function printName() { 'AsterixDB Shared nothing parallel BDMS' };
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.2.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.2.query.aql
new file mode 100644
index 0000000..087788a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.2.query.aql
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a function definition in a single line
+ * See: https://issues.apache.org/jira/browse/ASTERIXDB-1269
+ * Expected Res : Success
+ * Date         : Jul 10th 2016
+ */
+
+for $x in dataset Metadata.Function
+where $x.DataverseName = "test"
+return $x;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.3.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.3.ddl.aql
new file mode 100644
index 0000000..3e20182
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/single-line-definition/single-line-definition.3.ddl.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a function definition in a single line
+ * See: https://issues.apache.org/jira/browse/ASTERIXDB-1269
+ * Expected Res : Success
+ * Date         : Jul 10th 2016
+ */
+
+
+drop dataverse test;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/single-line-definition/single-line-definition.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/single-line-definition/single-line-definition.1.adm
new file mode 100644
index 0000000..5ba00ae
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/single-line-definition/single-line-definition.1.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "Name": "printName", "Arity": "0", "Params": [  ], "ReturnType": "VOID", "Definition": "'AsterixDB Shared nothing parallel BDMS'", "Language": "AQL", "Kind": "SCALAR" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index f7a9b6d..ffa4346 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -6118,6 +6118,11 @@
   </test-group>
   <test-group name="user-defined-functions">
     <test-case FilePath="user-defined-functions">
+      <compilation-unit name="single-line-definition">
+        <output-dir compare="Text">single-line-definition</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="user-defined-functions">
       <compilation-unit name="query-issue218-2">
         <output-dir compare="Text">query-issue218-2</output-dir>
       </compilation-unit>
diff --git a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
index b96fb26..e4da011 100644
--- a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -157,9 +157,7 @@
     private static final String SKIP_SECONDARY_INDEX_SEARCH_HINT = "skip-index";
     private static final String VAL_FILE_HINT = "val-files";
     private static final String VAL_FILE_SAME_INDEX_HINT = "val-file-same-idx";
-
     private static final String GEN_FIELDS_HINT = "gen-fields";
-
     // data generator hints
     private static final String DGEN_HINT = "dgen";
 
@@ -314,7 +312,7 @@
   String dvName = null;
 }
 {
-  "use" "dataverse" dvName = Identifier()
+  <USE> <DATAVERSE> dvName = Identifier()
     {
       defaultDataverse = dvName;
       return new DataverseDecl(new Identifier(dvName));
@@ -328,7 +326,7 @@
   Statement stmt = null;
 }
 {
-  "create"
+  <CREATE>
   (
     {
       hint = getHint(token);
@@ -357,8 +355,8 @@
   TypeExpression typeExpr = null;
 }
 {
-  "type" nameComponents = TypeName() ifNotExists = IfNotExists()
-  "as" typeExpr = TypeExpr()
+  <TYPE> nameComponents = TypeName() ifNotExists = IfNotExists()
+  <AS> typeExpr = TypeExpr()
     {
       long numValues = -1;
       String filename = null;
@@ -384,8 +382,8 @@
   List<Identifier>ncNames = null;
 }
 {
-  "nodegroup" name = Identifier()
-  ifNotExists = IfNotExists() "on" tmp = Identifier()
+  <NODEGROUP> name = Identifier()
+  ifNotExists = IfNotExists() <ON> tmp = Identifier()
     {
       ncNames = new ArrayList<Identifier>();
       ncNames.add(new Identifier(tmp));
@@ -421,13 +419,13 @@
 }
 {
   (
-    "external" <DATASET> nameComponents = QualifiedName()
+    <EXTERNAL> <DATASET> nameComponents = QualifiedName()
     <LEFTPAREN> typeComponents = TypeName() <RIGHTPAREN>
     ifNotExists = IfNotExists()
-    "using" adapterName = AdapterName() properties = Configuration()
-    ("on" nodeGroupName = Identifier() )?
-    ( "hints" hints = Properties() )?
-    ( "using" "compaction" "policy" compactionPolicy = CompactionPolicy() (compactionPolicyProperties = Configuration())? )?
+    <USING> adapterName = AdapterName() properties = Configuration()
+    (<ON> nodeGroupName = Identifier() )?
+    ( <HINTS> hints = Properties() )?
+    ( <USING> <COMPACTION> <POLICY> compactionPolicy = CompactionPolicy() (compactionPolicyProperties = Configuration())? )?
       {
         ExternalDetailsDecl edd = new ExternalDetailsDecl();
         edd.setAdapter(adapterName);
@@ -447,7 +445,7 @@
                                    ifNotExists);
       }
 
-    | ("internal" | "temporary" {
+    | (<INTERNAL> | <TEMPORARY> {
             temp = token.image.toLowerCase().equals("temporary");
         }
       )?
@@ -466,11 +464,11 @@
     )?
     ifNotExists = IfNotExists()
     primaryKeyFields = PrimaryKey()
-    ("autogenerated" { autogenerated = true; } )?
-    ("on" nodeGroupName = Identifier() )?
-    ( "hints" hints = Properties() )?
-    ( "using" "compaction" "policy" compactionPolicy = CompactionPolicy() (compactionPolicyProperties = Configuration())? )?
-    ( "with filter on" filterField = NestedField() )?
+    (<AUTOGENERATED> { autogenerated = true; } )?
+    (<ON> nodeGroupName = Identifier() )?
+    ( <HINTS> hints = Properties() )?
+    ( <USING> <COMPACTION> <POLICY> compactionPolicy = CompactionPolicy() (compactionPolicyProperties = Configuration())? )?
+    ( <WITH> <FILTER> <ON> filterField = NestedField() )?
       {
         if(filterField!=null && filterField.first!=0){
          throw new ParseException("A filter field can only be a field in the main record of the dataset.");
@@ -507,7 +505,7 @@
   String datasetName = null;
 }
 {
-    "refresh external" <DATASET> nameComponents = QualifiedName()
+    <REFRESH> <EXTERNAL> <DATASET> nameComponents = QualifiedName()
     {
     redss.setDataverseName(nameComponents.first);
     redss.setDatasetName(nameComponents.second);
@@ -524,13 +522,13 @@
   Pair<Identifier,Identifier> nameComponentsTo = null;
 }
 {
-  "run" system = Identifier()<LEFTPAREN> ( tmp = Identifier() [<COMMA>]
+  <RUN> system = Identifier()<LEFTPAREN> ( tmp = Identifier() [<COMMA>]
     {
       parameters.add(tmp);
     }
   )*<RIGHTPAREN>
   <FROM> <DATASET> nameComponentsFrom  = QualifiedName()
-  "to" <DATASET> nameComponentsTo  = QualifiedName()
+  <TO> <DATASET> nameComponentsTo  = QualifiedName()
     {
      return new RunStatement(system, parameters, nameComponentsFrom.first, nameComponentsFrom.second, nameComponentsTo.first, nameComponentsTo.second);
     }
@@ -547,9 +545,9 @@
   boolean enforced = false;
 }
 {
-  "index" indexName = Identifier()
+  <INDEX> indexName = Identifier()
   ifNotExists = IfNotExists()
-  "on" nameComponents = QualifiedName()
+  <ON> nameComponents = QualifiedName()
   <LEFTPAREN> ( fieldPair = OpenField()
     {
        cis.addFieldExprPair(fieldPair.second);
@@ -560,7 +558,7 @@
        cis.addFieldExprPair(fieldPair.second);
        cis.addFieldIndexIndicator(fieldPair.first);
     }
-  )* <RIGHTPAREN> ( "type" indexType = IndexType() )? ( "enforced" { enforced = true; } )?
+  )* <RIGHTPAREN> ( <TYPE> indexType = IndexType() )? ( <ENFORCED> { enforced = true; } )?
     {
       cis.setIndexName(new Identifier(indexName));
       cis.setIfNotExists(ifNotExists);
@@ -603,19 +601,19 @@
   int gramLength = 0;
 }
 {
-  ("btree"
+  (<BTREE>
     {
       type = IndexType.BTREE;
     }
-  | "rtree"
+  | <RTREE>
     {
       type = IndexType.RTREE;
     }
-  | "keyword"
+  | <KEYWORD>
     {
       type = IndexType.LENGTH_PARTITIONED_WORD_INVIX;
     }
-  | "ngram" <LEFTPAREN> <INTEGER_LITERAL>
+  | <NGRAM> <LEFTPAREN> <INTEGER_LITERAL>
     {
       type = IndexType.LENGTH_PARTITIONED_NGRAM_INVIX;
       gramLength = Integer.valueOf(token.image);
@@ -633,9 +631,9 @@
   String format = null;
 }
 {
-  "dataverse" dvName = Identifier()
+  <DATAVERSE> dvName = Identifier()
   ifNotExists = IfNotExists()
-  ( "with format" format = StringLiteral() )?
+  ( <WITH> <FORMAT> format = StringLiteral() )?
     {
      return new CreateDataverseStatement(new Identifier(dvName), format, ifNotExists);
     }
@@ -656,7 +654,7 @@
   createNewScope();
 }
 {
-  "function" fctName = FunctionName()
+  <FUNCTION>fctName = FunctionName()
   ifNotExists = IfNotExists()
   paramList = ParameterList()
   <LEFTBRACE>
@@ -688,15 +686,15 @@
 }
 {
   (
-    "secondary" "feed"  nameComponents = QualifiedName() ifNotExists = IfNotExists()
-      <FROM> "feed" sourceNameComponents = QualifiedName() (appliedFunction = ApplyFunction())?
+    <SECONDARY> <FEED>  nameComponents = QualifiedName() ifNotExists = IfNotExists()
+      <FROM> <FEED> sourceNameComponents = QualifiedName() (appliedFunction = ApplyFunction())?
       {
         cfs = new CreateSecondaryFeedStatement(nameComponents,
                                   sourceNameComponents, appliedFunction, ifNotExists);
       }
      |
-     ("primary")? "feed" nameComponents = QualifiedName() ifNotExists = IfNotExists()
-      "using" adapterName = AdapterName() properties = Configuration() (appliedFunction = ApplyFunction())?
+     (<PRIMARY>)? <FEED> nameComponents = QualifiedName() ifNotExists = IfNotExists()
+      <USING> adapterName = AdapterName() properties = Configuration() (appliedFunction = ApplyFunction())?
        {
         cfs = new CreatePrimaryFeedStatement(nameComponents,
                                    adapterName, properties, appliedFunction, ifNotExists);
@@ -719,14 +717,14 @@
 }
 {
   (
-    "ingestion" "policy"  policyName = Identifier() ifNotExists = IfNotExists()
+    <INGESTION> <POLICY>  policyName = Identifier() ifNotExists = IfNotExists()
       <FROM>
-      ("policy" basePolicyName = Identifier() properties = Configuration() ("definition" definition = StringLiteral())?
+      (<POLICY> basePolicyName = Identifier() properties = Configuration() (<DEFINITION> definition = StringLiteral())?
       {
         cfps = new CreateFeedPolicyStatement(policyName,
                                   basePolicyName, properties, definition, ifNotExists);
       }
-     | "path" sourcePolicyFile = Identifier() ("definition" definition = StringLiteral())?
+     | <PATH> sourcePolicyFile = Identifier() (<DEFINITION> definition = StringLiteral())?
        {
        cfps = new CreateFeedPolicyStatement(policyName, sourcePolicyFile, definition, ifNotExists);
        }
@@ -770,7 +768,7 @@
 {
 }
 {
-  ( "if not exists"
+  ( <IF> <NOT> <EXISTS>
     {
       return true;
     }
@@ -786,7 +784,7 @@
   FunctionSignature funcSig = null;
 }
 {
-  "apply" "function" functioName = FunctionName()
+  <APPLY><FUNCTION>functioName = FunctionName()
     {
       String fqFunctionName = functioName.library == null ? functioName.function : functioName.library + "#" + functioName.function;
        return new FunctionSignature(functioName.dataverse, fqFunctionName, 1);
@@ -798,7 +796,7 @@
   String policy = null;
 }
 {
-   "using" "policy" policy = Identifier()
+   <USING> <POLICY> policy = Identifier()
    {
      return policy;
    }
@@ -811,7 +809,7 @@
   int arity = 0;
 }
 {
-  fctName = FunctionName() "@" <INTEGER_LITERAL>
+  fctName = FunctionName() <SYMBOLAT><INTEGER_LITERAL>
     {
       arity = new Integer(token.image);
       if (arity < 0 && arity != FunctionIdentifier.VARARGS) {
@@ -831,7 +829,7 @@
    List<List<String>> primaryKeyFields = new ArrayList<List<String>>();
 }
 {
-  "primary" "key" tmp = NestedField()
+  <PRIMARY> <KEY> tmp = NestedField()
     {
       keyFieldSourceIndicators.add(tmp.first);
       primaryKeyFields.add(tmp.second);
@@ -857,33 +855,33 @@
   Statement stmt = null;
 }
 {
-  "drop"
+  <DROP>
   (
     <DATASET> pairId = QualifiedName() ifExists = IfExists()
       {
         stmt = new DropStatement(pairId.first, pairId.second, ifExists);
       }
-    | "index" tripleId = DoubleQualifiedName() ifExists = IfExists()
+    | <INDEX> tripleId = DoubleQualifiedName() ifExists = IfExists()
       {
        stmt = new IndexDropStatement(tripleId.first, tripleId.second, tripleId.third, ifExists);
       }
-    | "nodegroup" id = Identifier() ifExists = IfExists()
+    | <NODEGROUP> id = Identifier() ifExists = IfExists()
       {
         stmt = new NodeGroupDropStatement(new Identifier(id), ifExists);
       }
-    | "type" pairId = TypeName() ifExists = IfExists()
+    | <TYPE> pairId = TypeName() ifExists = IfExists()
       {
         stmt = new TypeDropStatement(pairId.first, pairId.second, ifExists);
       }
-    | "dataverse" id = Identifier() ifExists = IfExists()
+    | <DATAVERSE> id = Identifier() ifExists = IfExists()
       {
         stmt = new DataverseDropStatement(new Identifier(id), ifExists);
       }
-    | "function" funcSig = FunctionSignature() ifExists = IfExists()
+    | <FUNCTION>funcSig = FunctionSignature() ifExists = IfExists()
       {
         stmt = new FunctionDropStatement(funcSig, ifExists);
       }
-    | "feed" pairId = QualifiedName() ifExists = IfExists()
+    | <FEED> pairId = QualifiedName() ifExists = IfExists()
       {
         stmt = new FeedDropStatement(pairId.first, pairId.second, ifExists);
       }
@@ -897,7 +895,7 @@
 {
 }
 {
-  ( <IF> "exists"
+  ( <IF> <EXISTS>
     {
       return true;
     }
@@ -914,7 +912,7 @@
   boolean upsert = false;
 }
 {
-  ("insert"|"upsert"{ upsert = true; }) "into" <DATASET> nameComponents = QualifiedName() query = Query()
+  (<INSERT>|<UPSERT>{ upsert = true; }) <INTO> <DATASET> nameComponents = QualifiedName() query = Query()
     {
       query.setTopLevel(true);
       if(upsert){
@@ -936,7 +934,7 @@
 
 }
 {
-  "delete" var = Variable()
+  <DELETE> var = Variable()
     {
       getCurrentScope().addNewVarSymbolToScope(var.getVar());
     }
@@ -963,7 +961,7 @@
   List<UpdateClause> ucs = new ArrayList<UpdateClause>();
 }
 {
-  "update" vars = Variable() <IN> target = Expression()
+  <UPDATE> vars = Variable() <IN> target = Expression()
   <WHERE> condition = Expression()
   <LEFTPAREN> (uc = UpdateClause()
     {
@@ -991,7 +989,7 @@
   UpdateClause elsebranch = null;
 }
 {
-   ("set" target = Expression() <ASSIGN> value = Expression()
+   (<SET> target = Expression() <ASSIGN> value = Expression()
    | is = InsertStatement()
    | ds = DeleteStatement()
    | us = UpdateStatement()
@@ -1010,7 +1008,7 @@
   String pv = null;
 }
 {
-  "set" pn = Identifier() pv = StringLiteral()
+  <SET> pn = Identifier() pv = StringLiteral()
     {
       return new SetStatement(pn, pv);
     }
@@ -1025,8 +1023,8 @@
   Pair<Identifier,Identifier> nameComponents = null;
 }
 {
-  "write" "output" "to" nodeName = Identifier() <COLON> fileName = StringLiteral()
-    ( "using" writerClass = StringLiteral() )?
+  <WRITE> <OUTPUT> <TO> nodeName = Identifier() <COLON> fileName = StringLiteral()
+    ( <USING> writerClass = StringLiteral() )?
     {
      return new WriteStatement(new Identifier(nodeName), fileName, writerClass);
     }
@@ -1042,13 +1040,13 @@
   Pair<Identifier,Identifier> nameComponents = null;
 }
 {
-  "load" <DATASET> nameComponents = QualifiedName()
+  <LOAD> <DATASET> nameComponents = QualifiedName()
     {
       dataverseName = nameComponents.first;
       datasetName = nameComponents.second;
     }
-  "using" adapterName = AdapterName() properties = Configuration()
-  ("pre-sorted"
+  <USING> adapterName = AdapterName() properties = Configuration()
+  (<PRESORTED>
     {
       alreadySorted = true;
     }
@@ -1076,7 +1074,7 @@
   Statement stmt = null;
 }
 {
-  "compact" <DATASET> nameComponents = QualifiedName()
+  <COMPACT> <DATASET> nameComponents = QualifiedName()
     {
       stmt = new CompactStatement(nameComponents.first, nameComponents.second);
     }
@@ -1096,11 +1094,11 @@
 }
 {
   (
-    "connect" "feed" feedNameComponents = QualifiedName() "to" <DATASET> datasetNameComponents = QualifiedName() (policy = GetPolicy())?
+    <CONNECT> <FEED> feedNameComponents = QualifiedName() <TO> <DATASET> datasetNameComponents = QualifiedName() (policy = GetPolicy())?
       {
        stmt = new ConnectFeedStatement(feedNameComponents, datasetNameComponents, policy, getVarCounter());
       }
-    | "disconnect" "feed" feedNameComponents = QualifiedName() <FROM> <DATASET> datasetNameComponents = QualifiedName()
+    | <DISCONNECT> <FEED> feedNameComponents = QualifiedName() <FROM> <DATASET> datasetNameComponents = QualifiedName()
       {
        stmt = new DisconnectFeedStatement(feedNameComponents, datasetNameComponents);
       }
@@ -1219,8 +1217,8 @@
   RecordTypeDefinition.RecordKind recordKind = null;
 }
 {
-  ( "closed" { recordKind = RecordTypeDefinition.RecordKind.CLOSED; }
-    | "open" { recordKind = RecordTypeDefinition.RecordKind.OPEN; } )?
+  ( <CLOSED>{ recordKind = RecordTypeDefinition.RecordKind.CLOSED; }
+    | <OPEN>{ recordKind = RecordTypeDefinition.RecordKind.OPEN; } )?
    <LEFTBRACE>
     {
       String hint = getHint(token);
@@ -1331,7 +1329,7 @@
     {
       secondAfterDot = true;
     }
-  ("#" third = Identifier())? | "#" second = Identifier() )?
+  (<SYMBOLHASH> third = Identifier())? | <SYMBOLHASH> second = Identifier() )?
     {
       if (second == null) {
         result.dataverse = defaultDataverse;
@@ -1380,7 +1378,7 @@
   String lit = null;
 }
 {
-  (<IDENTIFIER>
+  ((<IDENTIFIER>|<NOT>)
     {
       return token.image;
     }
@@ -1510,7 +1508,7 @@
   createNewScope();
 }
 {
-  "declare" "function" functionName = Identifier()
+  <DECLARE><FUNCTION>functionName = Identifier()
   paramList = ParameterList()
   <LEFTBRACE> funcBody = Expression() <RIGHTBRACE>
     {
@@ -2518,33 +2516,87 @@
 <DEFAULT,IN_DBL_BRACE>
 TOKEN :
 {
-    <ASC : "asc">
+    <APPLY : "apply">
+  | <AS : "as">
+  | <ASC : "asc">
   | <AT : "at">
+  | <AUTOGENERATED : "autogenerated">
+  | <BTREE : "btree">
   | <BY : "by">
+  | <CLOSED : "closed">
+  | <COMPACT : "compact">
+  | <COMPACTION : "compaction">
+  | <CONNECT : "connect">
+  | <CREATE : "create">
   | <DATASET : "dataset">
+  | <DATAVERSE : "dataverse">
+  | <DECLARE : "declare">
   | <DECOR : "decor">
+  | <DEFINITION : "definition">
+  | <DELETE : "delete">
   | <DESC : "desc">
+  | <DISCONNECT : "disconnect">
   | <DISTINCT : "distinct">
+  | <DROP : "drop">
   | <ELSE : "else">
+  | <ENFORCED : "enforced">
   | <EVERY : "every">
+  | <EXISTS : "exists">
+  | <EXTERNAL : "external">
+  | <FEED : "feed">
+  | <FILTER : "filter">
   | <FOR : "for">
+  | <FORMAT : "format">
   | <FROM : "from">
+  | <FUNCTION : "function">
   | <GROUP : "group">
+  | <HINTS : "hints">
   | <IF : "if">
   | <IN : "in">
+  | <INDEX : "index">
+  | <INGESTION : "ingestion">
+  | <INSERT : "insert">
+  | <INTERNAL : "internal">
+  | <INTO : "into">
+  | <KEY : "key">
+  | <KEYWORD : "keyword">
+  | <KEEPING : "keeping">
   | <LET : "let">
   | <LIMIT : "limit">
+  | <LOAD : "load">
+  | <NGRAM : "ngram">
+  | <NODEGROUP : "nodegroup">
+  | <NOT : "not">
   | <OFFSET : "offset">
+  | <ON : "on">
+  | <OPEN : "open">
   | <ORDER : "order">
+  | <OUTPUT : "output">
+  | <PATH : "path">
+  | <POLICY : "policy">
+  | <PRESORTED : "pre-sorted">
+  | <PRIMARY : "primary">
+  | <REFRESH : "refresh">
   | <RETURN : "return">
+  | <RTREE : "rtree">
+  | <RUN : "run">
   | <SATISFIES : "satisfies">
+  | <SECONDARY : "secondary">
   | <SELECT : "select">
+  | <SET : "set">
   | <SOME : "some">
+  | <TEMPORARY : "temporary">
   | <THEN : "then">
+  | <TO : "to">
+  | <TYPE : "type">
   | <UNION : "union">
+  | <UPDATE : "update">
+  | <UPSERT : "upsert">
+  | <USE : "use">
+  | <USING : "using">
   | <WHERE : "where">
   | <WITH : "with">
-  | <KEEPING : "keeping">
+  | <WRITE : "write">
 }
 
 <DEFAULT,IN_DBL_BRACE>
@@ -2579,6 +2631,9 @@
 
   | <AND : "and">
   | <OR : "or">
+  
+  | <SYMBOLAT : "@">
+  | <SYMBOLHASH : "#">
 }
 
 <DEFAULT,IN_DBL_BRACE>
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
index 6ab82c1..dc0fa93 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
@@ -222,8 +222,11 @@
 
     public String extractFragment(int beginLine, int beginColumn, int endLine, int endColumn) {
         StringBuilder extract = new StringBuilder();
-        extract.append(inputLines[beginLine - 1].trim().length() > 1
-                ? inputLines[beginLine - 1].trim().substring(beginColumn) : "");
+        if (beginLine == endLine) {
+            // special case that we need to handle separately
+            return inputLines[beginLine - 1].substring(beginColumn, endColumn - 1).trim();
+        }
+        extract.append(inputLines[beginLine - 1].substring(beginColumn));
         for (int i = beginLine + 1; i < endLine; i++) {
             extract.append("\n");
             extract.append(inputLines[i - 1]);
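
The ScopeChecker hunk above is the core of the ASTERIXDB-1269 fix. Below is a rough, standalone sketch (not the actual parser wiring) of what the new single-line branch computes; the sample line and the expected output come from the single-line-definition test added in this change, while the class name, helper method, and 1-based token-column convention are assumptions made for illustration. The old code trimmed the line before applying the column offset and had no end bound, so a definition that begins and ends on one line could pick up trailing text such as the closing "};".

// Hedged sketch of the new single-line branch in ScopeChecker.extractFragment.
// Assumed (not taken from this patch): beginColumn/endColumn are the 1-based
// columns of the tokens that delimit the function body, as JavaCC reports them.
public class ExtractFragmentSketch {

    // Mirrors the patched branch: start right after the opening delimiter,
    // stop just before the closing one, then trim the surrounding whitespace.
    static String extractSingleLine(String line, int beginColumn, int endColumn) {
        return line.substring(beginColumn, endColumn - 1).trim();
    }

    public static void main(String[] args) {
        // Line and expected result come from single-line-definition.1.ddl.aql / .1.adm
        String line = "create function printName() { 'AsterixDB Shared nothing parallel BDMS' };";
        int beginColumn = line.indexOf('{') + 1; // assumed 1-based column of LEFTBRACE
        int endColumn = line.indexOf('}') + 1;   // assumed 1-based column of RIGHTBRACE
        // Prints: 'AsterixDB Shared nothing parallel BDMS'
        System.out.println(extractSingleLine(line, beginColumn, endColumn));
    }
}

Under those assumptions the extracted body matches the Definition field asserted in the new .adm result above.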

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/988
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I18ac4f8d86b3c5c7bfe226c98114499671649e93
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: abdullah alamoudi <[email protected]>
