Use multi-catch exceptions in HDFSFunctions, change the Java version from 1.6
to 1.7 to support multi-catch, and clarify the IP configuration in the
documentation


Project: http://git-wip-us.apache.org/repos/asf/vxquery/repo
Commit: http://git-wip-us.apache.org/repos/asf/vxquery/commit/c85e5566
Tree: http://git-wip-us.apache.org/repos/asf/vxquery/tree/c85e5566
Diff: http://git-wip-us.apache.org/repos/asf/vxquery/diff/c85e5566

Branch: refs/heads/steven/hdfs
Commit: c85e5566212349ecaf24aa613b8e177b8891d98b
Parents: 94801fb
Author: efikalti <[email protected]>
Authored: Sat Oct 24 12:02:44 2015 +0300
Committer: efikalti <[email protected]>
Committed: Sat Oct 24 12:02:44 2015 +0300

----------------------------------------------------------------------
 pom.xml                                         |  4 +--
 src/site/apt/user_query_hdfs.apt                | 14 ++++++++
 .../org/apache/vxquery/hdfs2/HDFSFunctions.java | 36 +++++++-------------
 3 files changed, 28 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/vxquery/blob/c85e5566/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a1e22a6..8128251 100644
--- a/pom.xml
+++ b/pom.xml
@@ -396,8 +396,8 @@
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
                 <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
+                    <source>1.7</source>
+                    <target>1.7</target>
                 </configuration>
             </plugin>
             <plugin>

http://git-wip-us.apache.org/repos/asf/vxquery/blob/c85e5566/src/site/apt/user_query_hdfs.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user_query_hdfs.apt b/src/site/apt/user_query_hdfs.apt
index 662d5df..12b04e8 100644
--- a/src/site/apt/user_query_hdfs.apt
+++ b/src/site/apt/user_query_hdfs.apt
@@ -24,6 +24,20 @@ Executing a Query in HDFS
     This information should be defined in the <local.xml> or <cluster.xml> 
file at 
     <vxquery-server/src/main/resources/conf/> .
 
+    You can find the IP of each node in the /etc/hosts file. There should be
+    at least two different IPs defined in that file: one for localhost and
+    one with the hostname of the node. The correct one for this configuration
+    is the one of the host.
+
+    For example:
+    An ubuntu /etc/hosts file could look like this:
+
+-----------------------------
+127.0.0.1       localhost
+127.0.1.1       node1
+-----------------------------
+    
+    The IP <127.0.1.1> along with the hostname <node1> should be defined in 
the <local.xml> or <cluster.xml> file.
+
 
 * 2. Running the Query
 

http://git-wip-us.apache.org/repos/asf/vxquery/blob/c85e5566/vxquery-core/src/main/java/org/apache/vxquery/hdfs2/HDFSFunctions.java
----------------------------------------------------------------------
diff --git 
a/vxquery-core/src/main/java/org/apache/vxquery/hdfs2/HDFSFunctions.java 
b/vxquery-core/src/main/java/org/apache/vxquery/hdfs2/HDFSFunctions.java
index b6655ed..4a562cf 100644
--- a/vxquery-core/src/main/java/org/apache/vxquery/hdfs2/HDFSFunctions.java
+++ b/vxquery-core/src/main/java/org/apache/vxquery/hdfs2/HDFSFunctions.java
@@ -102,15 +102,7 @@ public class HDFSFunctions {
             job.setInputFormatClass(XmlCollectionWithTagInputFormat.class);
             inputFormat = 
ReflectionUtils.newInstance(job.getInputFormatClass(), job.getConfiguration());
             splits = inputFormat.getSplits(job);
-        } catch (IOException e) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe(e.getMessage());
-            }
-        } catch (ClassNotFoundException e) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe(e.getMessage());
-            }
-        } catch (InterruptedException e) {
+        } catch (IOException | ClassNotFoundException | InterruptedException 
e) {
             if (LOGGER.isLoggable(Level.SEVERE)) {
                 LOGGER.severe(e.getMessage());
             }
@@ -175,18 +167,22 @@ public class HDFSFunctions {
             // load properties file
             Properties prop = new Properties();
             String propFilePath = 
"../vxquery-server/src/main/resources/conf/cluster.properties";
-            nodeXMLfile = new 
File("../vxquery-server/src/main/resources/conf/local.xml");
+            nodeXMLfile = new 
File("../vxquery-server/src/main/resources/conf/cluster.xml");
+            if(!nodeXMLfile.exists()) { 
+                nodeXMLfile = new 
File("vxquery-server/src/main/resources/conf/cluster.xml");
+                if(!nodeXMLfile.exists()) { 
+                    nodeXMLfile = new 
File("vxquery-server/src/main/resources/conf/local.xml");
+                }
+                if(!nodeXMLfile.exists()) { 
+                    nodeXMLfile = new 
File("../vxquery-server/src/main/resources/conf/local.xml");
+                }
+            }
             try {
                 prop.load(new FileInputStream(propFilePath));
             } catch (FileNotFoundException e) {
                 propFilePath = 
"vxquery-server/src/main/resources/conf/cluster.properties";
-                nodeXMLfile = new 
File("vxquery-server/src/main/resources/conf/local.xml");
                 try {
                     prop.load(new FileInputStream(propFilePath));
-                } catch (FileNotFoundException e1) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe(e1.getMessage());
-                    }
                 } catch (IOException e1) {
                     if (LOGGER.isLoggable(Level.SEVERE)) {
                         LOGGER.severe(e1.getMessage());
@@ -414,15 +410,7 @@ public class HDFSFunctions {
                     RecordReader reader = 
inputFormat.createRecordReader(inputSplits.get(i), context);
                     reader.initialize(inputSplits.get(i), context);
                     return reader;
-                } catch (HyracksDataException e) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe(e.getMessage());
-                    }
-                } catch (IOException e) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe(e.getMessage());
-                    }
-                } catch (InterruptedException e) {
+                } catch (IOException | InterruptedException e) {
                     if (LOGGER.isLoggable(Level.SEVERE)) {
                         LOGGER.severe(e.getMessage());
                     }

Reply via email to