http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
deleted file mode 100644
index 9f9e053..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-public class HiveSavedQueryMigrationUtility {
-
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class);
-
-    Connection connectionAmbaridb = null;
-    Connection connectionHuedb = null;
-
-    int i = 0;
-
-    logger.info("-------------------------------------");
-    logger.info("hive saved query Migration started");
-    logger.info("-------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + instance);
-    logger.info("hue username is : " + username);
-
-    HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object  */
-
-    QuerySet huedatabase=null;
-
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-     huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
-    }
-
-
-    QuerySetAmbariDB ambaridatabase=null;
-
-
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
-    }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
-    }
-
-    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
-    String time = null;
-    Long epochtime = null;
-    String dirNameforHiveSavedquery;
-    ArrayList<HiveModel> dbpojoHiveSavedQuery = new ArrayList<HiveModel>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
-
-      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb,huedatabase); /* fetching data from hue db and storing it in to a model */
-
-
-      for(int j=0;j<dbpojoHiveSavedQuery.size();j++)
-      {
-        logger.info("the query fetched from 
hue"+dbpojoHiveSavedQuery.get(j).getQuery());
-
-      }
-
-
-      if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
-        getResourceManager(view).update(migrationresult, jobid);
-        logger.info("No queries has been selected acccording to your 
criteria");
-
-        logger.info("no hive saved query has been selected from hue according 
to your criteria of searching");
-
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-
-          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
-          migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-
-
-
-          logger.info("query fetched from hue:-  " + 
dbpojoHiveSavedQuery.get(i).getQuery());
-
-          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration saved query  from the given instance name */
-
-          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration history query from the given instance name */
-
-          logger.info("Table name are fetched from instance name.");
-
-          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing migration query to a local file*/
-
-          hivesavedqueryimpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());/* writing logs to localfile */
-
-          logger.info(".hql and logs file are saved in temporary directory");
-
-          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory( connectionAmbaridb, tableIdHistoryHive,ambaridatabase) + 1);/* fetching the maximum ds_id from migration history table*/
-
-          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(connectionAmbaridb, tableIdSavedQuery,ambaridatabase) + 1);/* fetching the maximum ds_id from migration saved query table*/
-
-          time = hivesavedqueryimpl.getTime();/* getting system time */
-
-          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
-
-          dirNameforHiveSavedquery = "/user/admin/migration/jobs/migration-job-" + maxcountForHivehistroryAmbaridb + "-"
-            + time + "/"; // creating hdfs directory name
-
-          logger.info("Directory will be creted in HDFS" + 
dirNameforHiveSavedquery);
-
-          hivesavedqueryimpl.insertRowHiveHistory(dirNameforHiveSavedquery,maxcountForHivehistroryAmbaridb,epochtime,connectionAmbaridb,tableIdHistoryHive,instance,i,ambaridatabase);// inserting to migration history table
-
-          logger.info("Row inserted in hive History table.");
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("Kerberose Enabled");
-            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
-
-          } else {
-
-            logger.info("Kerberose Not Enabled");
-            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
-          }
-
-          //inserting into hived saved query table
-          //6.
-          hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i,ambaridatabase);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-
-      logger.error("SQL exception: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("Rollback error: ", e1);
-
-      }
-    } catch (ClassNotFoundException e1) {
-      logger.error("Class not found : " , e1);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException:" , e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("Error in connection close", e);
-        }
-    }
-
-
-    hivesavedqueryimpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
-    hivesavedqueryimpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    MigrationModel model=new MigrationModel();
-
-    migrationresult.setJobtype("hivesavedquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-
-    logger.info("-------------------------------");
-    logger.info("hive saved query Migration end");
-    logger.info("--------------------------------");
-
-    return model;
-
-  }
-}
-
-
-
-

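The deleted utility above selects its Hue and Ambari query sets by substring-matching the configured JDBC driver name. A minimal, self-contained sketch of that dispatch pattern follows; the QuerySet types here are illustrative stand-ins, not the real view classes.

public class QuerySetDispatchSketch {

  // Hypothetical stand-ins for the real Hue query-set classes.
  interface QuerySet { String name(); }
  static class MysqlQuerySet implements QuerySet { public String name() { return "mysql"; } }
  static class PostgressQuerySet implements QuerySet { public String name() { return "postgresql"; } }
  static class SqliteQuerySet implements QuerySet { public String name() { return "sqlite"; } }
  static class OracleQuerySet implements QuerySet { public String name() { return "oracle"; } }

  // Substring match on the configured JDBC driver name, mirroring the
  // if/else chain in hiveSavedQueryMigration(); an unrecognized driver
  // yields null, exactly as in the deleted code.
  static QuerySet forDriver(String drivername) {
    if (drivername.contains("mysql"))      return new MysqlQuerySet();
    if (drivername.contains("postgresql")) return new PostgressQuerySet();
    if (drivername.contains("sqlite"))     return new SqliteQuerySet();
    if (drivername.contains("oracle"))     return new OracleQuerySet();
    return null;
  }

  public static void main(String[] args) {
    System.out.println(forDriver("org.postgresql.Driver").name()); // prints "postgresql"
  }
}
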
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
deleted file mode 100644
index 0445132..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class HiveSavedQueryStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public HiveSavedQueryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    JSONObject response = new JSONObject();
-
-    /**
-     * creating a separate thread
-     */
-
-    HiveSavedQueryMigrationUtility hivesavedquery=new HiveSavedQueryMigrationUtility();
-    try {
-      hivesavedquery.hiveSavedQueryMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

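The *StartJob classes in this commit all follow the same shape: a Thread subclass captures the job parameters, seeds a MigrationResponse with the job id and 0% progress, then calls the blocking migration utility from run(). A minimal sketch under those assumptions; every name here is an illustrative placeholder, not the real view class.

public class StartJobSketch extends Thread {

  private final String jobid;

  public StartJobSketch(String jobid) {
    this.jobid = jobid;
  }

  @Override
  public void run() {
    // The real jobs build a MigrationResponse here, set id/instance/user
    // and 0% progress, then invoke the blocking migration utility.
    try {
      doMigration();
    } catch (Exception e) {
      // The deleted code only prints the stack trace, so a failed
      // migration never surfaces to the caller; it must be seen in logs.
      e.printStackTrace();
    }
  }

  private void doMigration() throws Exception {
    // Placeholder for hiveSavedQueryMigration(...) / pigJobMigration(...).
    System.out.println("job " + jobid + " running on " + Thread.currentThread().getName());
  }

  public static void main(String[] args) throws InterruptedException {
    Thread job = new StartJobSketch("job-1");
    job.start(); // the caller returns immediately; progress is polled elsewhere
    job.join();
  }
}
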
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
deleted file mode 100644
index 64e7069..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.io.*;
-import java.net.URISyntaxException;
-;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySet;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-
-public class PigJobMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(PigJobMigrationImplementation.class);
-
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-    XMLOutputter xmlOutput = new XMLOutputter();
-    xmlOutput.setFormat(Format.getPrettyFormat());
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-        Element rootNode = doc.getRootElement();
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-      } catch (JDOMException e) {
-
-        logger.error("Jdom Exception: ", e);
-      }
-
-
-    } else {
-      // create
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-      } catch (IOException io) {
-        logger.error("Jdom Exception: ", io);
-      }
-
-    }
-
-  }
-
-  public int fetchMaxIdforPigJob(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-
-  }
-
-  public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-  }
-
-  public void insertRowPigJob(String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String epochtime1 = Long.toString(epochtime);
-    String maxcountforpigjob1 = Integer.toString(maxcountforpigjob);
-    String ds_id = new String();
-    String revSql;
-
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.insertToPigJob(dirname, maxcountforpigjob1, epochtime, title, c, id, status);
-
-    prSt.executeUpdate();
-
-    revSql = ambaridatabase.revertSql(id, maxcountforpigjob1);
-
-    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
-
-  }
-
-  public long getEpochTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-    return epoch;
-
-  }
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-    return s;
-
-  }
-
-  public String getTimeInorder() throws ParseException {
-    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
-    Date now = new Date();
-    String strDate = sdfDate.format(now);
-    return strDate;
-  }
-
-  public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
-    int id = 0;
-    int i = 0;
-    String[] query = new String[100];
-    ArrayList<PigModel> pigjobarraylist = new ArrayList<PigModel>();
-    try {
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      Statement statement = connection.createStatement();
-      ResultSet rs;
-
-      ResultSet rs1 = null;
-      if (username.equals("all")) {
-      } else {
-
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-
-
-      }
-
-      rs1 = prSt.executeQuery();
-
-
-      while (rs1.next()) {
-        PigModel pigjjobobject = new PigModel();
-
-        int runstatus = rs1.getInt("status");
-
-        if (runstatus == 1) {
-          pigjjobobject.setStatus("RUNNING");
-        } else if (runstatus == 2) {
-          pigjjobobject.setStatus("SUCCEEDED");
-        } else if (runstatus == 3) {
-          pigjjobobject.setStatus("SUBMIT_FAILED");
-        } else if (runstatus == 4) {
-          pigjjobobject.setStatus("KILLED");
-        }
-        String title = rs1.getString("script_title");
-
-
-        pigjjobobject.setTitle(title);
-        String dir = rs1.getString("statusdir");
-        pigjjobobject.setDir(dir);
-        Date created_data = rs1.getDate("start_time");
-        pigjjobobject.setDt(created_data);
-
-        pigjobarraylist.add(pigjjobobject);
-
-        i++;
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sqlexception: ", e);
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("Sqlexception in closing the connection: ", e);
-
-      }
-    }
-
-    return pigjobarraylist;
-
-  }
-
-  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-  }
-
-  /**/
-  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-  }
-
-  /**/
-  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration confAmbari = new Configuration();
-          confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
-          confAmbari.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
-
-          Configuration confHue = new Configuration();
-          confHue.set("fs.defaultFS", nameNodeuriAmbari);
-          confHue.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystemHue = FileSystem.get(confHue);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path1 = new Path(source);
-          FSDataInputStream in1 = fileSystemHue.open(path1);
-
-          Path path = new Path(dest1);
-          if (fileSystemAmbari.exists(path)) {
-
-          }
-
-          FSDataOutputStream out = fileSystemAmbari.create(path);
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in1.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in1.close();
-          out.close();
-          fileSystemAmbari.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-
-  }
-
-  /**/
-  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
-    throws IOException {
-
-    try {
-
-      final Configuration confAmbari = new Configuration();
-      confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
-      confAmbari.set("hadoop.job.ugi", "hdfs");
-
-      final Configuration confHue = new Configuration();
-      confHue.set("fs.defaultFS", nameNodeuriAmbari);
-      confHue.set("hadoop.job.ugi", "hdfs");
-
-      confAmbari.set("hadoop.security.authentication", "Kerberos");
-      confHue.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
-
-          FileSystem fileSystemHue = FileSystem.get(confHue);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path1 = new Path(source);
-          FSDataInputStream in1 = fileSystemHue.open(path1);
-
-          Path path = new Path(dest1);
-          if (fileSystemAmbari.exists(path)) {
-
-          }
-          FSDataOutputStream out = fileSystemAmbari.create(path);
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in1.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in1.close();
-          out.close();
-          fileSystemAmbari.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-
-  }
-
-}

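The HDFS helpers above share one shape: build a Configuration per cluster, act as a remote "hdfs" user through UserGroupInformation.doAs, and stream bytes between the two FileSystems. Below is a minimal sketch of that copy pattern, assuming the Hadoop client libraries are on the classpath; the URIs and the "hdfs" proxy user are illustrative. Note that the deleted code sets both fs.defaultFS values to nameNodeuriAmbari, so its "Hue" filesystem actually points at the Ambari cluster.

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;

public class HdfsCopySketch {

  // Copy src (on the cluster at srcUri) to dst (on the cluster at dstUri),
  // running the whole operation as a remote "hdfs" user.
  public static void copy(final String srcUri, final String src,
                          final String dstUri, final String dst) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        Configuration srcConf = new Configuration();
        srcConf.set("fs.defaultFS", srcUri);
        Configuration dstConf = new Configuration();
        dstConf.set("fs.defaultFS", dstUri);

        FileSystem srcFs = FileSystem.get(srcConf);
        FileSystem dstFs = FileSystem.get(dstConf);

        FSDataInputStream in = srcFs.open(new Path(src));
        FSDataOutputStream out = dstFs.create(new Path(dst));
        IOUtils.copyBytes(in, out, 1024, true); // same 1024-byte buffer; closes both streams
        return null;
      }
    });
  }

  public static void main(String[] args) throws Exception {
    // Illustrative URIs only.
    copy("hdfs://hue-nn:8020", "/user/hue/pig/job1/script.pig",
         "hdfs://ambari-nn:8020", "/user/admin/pig/jobs/job1/script.pig");
  }
}
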
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
deleted file mode 100644
index 5d99b49..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import org.apache.ambari.view.ViewContext;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-
-public class PigJobMigrationUtility  {
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("------------------------------");
-    logger.info("pig Jobs Migration started");
-    logger.info("------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
-
-    QuerySet huedatabase=null;
-
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-      huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
-    }
-
-    QuerySetAmbariDB ambaridatabase=null;
-
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
-    }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
-    }
-    int maxCountforPigScript = 0,i=0;
-
-    String time = null, timeIndorder = null;
-    Long epochtime = null;
-    String pigJobDirName;
-    ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
-
-      pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb,huedatabase);// fetching the PigJobs details from hue
-
-      for(int j=0;j<pigJobDbPojo.size();j++)
-      {
-        logger.info("the query fetched from 
hue="+pigJobDbPojo.get(i).getScript());
-
-      }
-
-                       /*No pig Job details has been fetched accordring to search criteria*/
-      if (pigJobDbPojo.size() == 0) {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(pigJobDbPojo.size());
-        getResourceManager(view).update(migrationresult, jobid);
-        logger.info("no pig Job has been selected from hue according to your 
criteria of searching");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < pigJobDbPojo.size(); i++) {
-
-          float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
-          int progressPercentage = Math.round(calc);
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
-          migrationresult.setTotalNoQuery(pigJobDbPojo.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-
-
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
-
-          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance,ambaridatabase);
-
-          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(connectionAmbaridb, fetchPigTablenameInstance,ambaridatabase) + 1);
-
-          time = pigjobimpl.getTime();
-          timeIndorder = pigjobimpl.getTimeInorder();
-          epochtime = pigjobimpl.getEpochTime();
-
-          pigJobDirName = "/user/admin/pig/jobs/" + 
pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
-
-          pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i,ambaridatabase);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          } else {
-
-            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          }
-
-          logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
-
-        }
-        connectionAmbaridb.commit();
-      }
-
-    } catch (SQLException e) {
-      logger.error("sql exception in ambari database:", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("roll back  exception:",e1);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception:",e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: " ,e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException" ,e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException" ,e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection closing exception ", e);
-        }
-    }
-
-    logger.info("------------------------------");
-    logger.info("pig Job Migration End");
-    logger.info("------------------------------");
-
-    //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-//    CheckProgresStatus.setProgressPercentage(0);
-//    CheckProgresStatus.setNoOfQueryCompleted(0);
-//    CheckProgresStatus.setTotalNoOfQuery(0);
-//    CheckProgresStatus.setNoOfQueryLeft(0);
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    migrationresult.setJobtype("hivehistoryquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-  }
-
-}
-

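Both migration utilities report progress with the same arithmetic: scale (i + 1) / total to an integer percentage and push it into the MigrationResponse on every loop iteration. A standalone sketch of just that calculation:

public class ProgressSketch {

  // Mirrors the loop arithmetic above: a float division scaled to 0-100
  // and rounded to the nearest integer.
  static int progressPercentage(int completed, int total) {
    float calc = ((float) completed) / total * 100;
    return Math.round(calc);
  }

  public static void main(String[] args) {
    int total = 7;
    for (int i = 0; i < total; i++) {
      // prints 14, 29, 43, 57, 71, 86, 100
      System.out.println(progressPercentage(i + 1, total));
    }
  }
}
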
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
deleted file mode 100644
index 15f033f..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class PigJobStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public PigJobStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    PigJobMigrationUtility pigjobmigration=new PigJobMigrationUtility();
-    try {
-      pigjobmigration.pigJobMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

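The MigrationImplementation classes (wrtitetoalternatesqlfile in PigJobMigrationImplementation above, and again in PigScriptMigrationImplementation below) record an undo statement for every inserted row by appending a RevertRecord element to RevertChangesService.xml, creating the document with a RevertChangePage root on first use. A trimmed sketch of that JDOM pattern, assuming a JDOM 1.x jar on the classpath; the datetime field of the real record is omitted here.

import java.io.File;
import java.io.FileWriter;
import org.jdom.Attribute;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;

public class RevertLogSketch {

  // Append one RevertRecord; create the document on first use, as the
  // deleted code's exists() branch does.
  public static void append(File xmlfile, String id, String dirname,
                            String instance, String query) throws Exception {
    Document doc = xmlfile.exists()
        ? new SAXBuilder().build(xmlfile)
        : new Document(new Element("RevertChangePage"));

    Element record = new Element("RevertRecord");
    record.setAttribute(new Attribute("id", id));
    record.addContent(new Element("dirname").setText(dirname));
    record.addContent(new Element("instance").setText(instance));
    record.addContent(new Element("query").setText(query));
    doc.getRootElement().addContent(record);

    XMLOutputter out = new XMLOutputter(Format.getPrettyFormat());
    out.output(doc, new FileWriter(xmlfile)); // rewrites the whole file each call
  }

  public static void main(String[] args) throws Exception {
    // Illustrative path and revert SQL.
    append(new File("RevertChangesService.xml"), "1", "/tmp/dir", "inst", "DELETE FROM ...");
  }
}
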
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
deleted file mode 100644
index c5f073c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class PigSavedScriptStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public PigSavedScriptStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    PigScriptMigrationUtility pigsavedscript =new PigScriptMigrationUtility();
-    try {
-      pigsavedscript.pigScriptMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
deleted file mode 100644
index c8aa1c0..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
+++ /dev/null
@@ -1,504 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob.PigJobMigrationImplementation;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.QuerySet;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-import java.io.*;
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-
-
-public class PigScriptMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(PigScriptMigrationImplementation.class);
-
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-
-        Element rootNode = doc.getRootElement();
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (JDOMException e) {
-        logger.error("JDOMException: ", e);
-      }
-
-
-    } else {
-      // create
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        doc.getRootElement().addContent(record);
-
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (IOException io) {
-        logger.error("IOException: ", io);
-
-      }
-
-    }
-
-
-  }
-
-  public int fetchInstanceTablenamePigScript(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-
-  }
-
-  public int fetchmaxIdforPigSavedScript(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-  public void insertRowForPigScript(String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String epochtime1 = Long.toString(epochtime);
-    String revSql = null;
-
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.insertToPigScript(c, id, maxcount1, dirname, title);
-
-    prSt.executeUpdate();
-
-    revSql = ambaridatabase.revertSql(id, maxcount1);
-
-    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
-
-  }
-
-
-  public long getEpochTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-
-    return epoch;
-
-  }
-
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-
-    return s;
-
-  }
-
-
-  public String getTimeInorder() throws ParseException {
-    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
-    Date now = new Date();
-    String strDate = sdfDate.format(now);
-    return strDate;
-  }
-
-
-  public ArrayList<PigModel> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
-    int id = 0;
-    int i = 0;
-    ResultSet rs1 = null;
-    String[] query = new String[100];
-    ArrayList<PigModel> pigArrayList = new ArrayList<PigModel>();
-    try {
-      Statement statement = connection.createStatement();
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      ResultSet rs;
-      if (username.equals("all")) {
-      } else {
-
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-
-
-      }
-
-      rs1 = prSt.executeQuery();
-
-
-      // rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id ="+id+" AND date_created BETWEEN '"+ startdate +"' AND '" +endtime +"';");
-      while (rs1.next()) {
-        PigModel pojopig = new PigModel();
-        String script = rs1.getString("pig_script");
-        String title = rs1.getString("title");
-        Date created_data = rs1.getDate("date_created");
-        pojopig.setDt(created_data);
-        pojopig.setScript(script);
-        pojopig.setTitle(title);
-
-        pigArrayList.add(pojopig);
-        i++;
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("SQLException", e);
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("SQLException", e);
-      }
-    }
-
-    return pigArrayList;
-
-  }
-
-  public void writetPigScripttoLocalFile(String script, String title, Date createddate, String homedir, String filename2) {
-    try {
-      logger.info(homedir + filename2);
-      File file = new File(homedir + filename2);
-
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(script);
-      bw.close();
-
-
-    } catch (IOException e) {
-
-      logger.error("IOException", e);
-    }
-
-  }
-
-  public void deletePigScriptLocalFile(String homedir, String filename2) {
-    try {
-
-      File file = new File(homedir + filename2);
-
-      if (file.delete()) {
-        logger.info("Temproray file deleted");
-      } else {
-        logger.info("Temproray file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception: ", e);
-
-    }
-
-  }
-
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs Exception: ", e);
-    }
-
-  }
-
-  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          //   Path pathsource = new Path(source);
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs Exception: ", e);
-
-    }
-
-  }
-}
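
Both putFileinHdfs variants above copy through a hand-rolled 1 KB buffer, leak the streams if the copy throws mid-way, and carry an empty fileSystem.exists(path) check. Hadoop's FileSystem API already provides the local-to-HDFS copy; here is a sketch of an equivalent doAs body under the same assumptions as the deleted code (remote user "hdfs", namenodeuri as fs.defaultFS):

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    // Sketch only: copyFromLocalFile replaces the manual read/write loop, and
    // try-with-resources closes the FileSystem even when the copy fails.
    public void putFileinHdfs(final String source, final String dest,
                              final String namenodeuri) throws Exception {
      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
      ugi.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
          Configuration conf = new Configuration();
          conf.set("fs.defaultFS", namenodeuri);
          try (FileSystem fs = FileSystem.get(conf)) {
            // delSrc=false keeps the local temp file for the caller to delete;
            // overwrite=true matches the old behaviour of re-creating the target.
            fs.copyFromLocalFile(false, true, new Path(source), new Path(dest));
          }
          return null;
        }
      });
    }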

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
deleted file mode 100644
index 44e27c1..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-public class PigScriptMigrationUtility {
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-
-  public void pigScriptMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(PigScriptMigrationUtility.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("-------------------------------------");
-    logger.info("pig saved script Migration started");
-    logger.info("-------------------------------------");
-
-
-    int i = 0;
-
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    //Reading the configuration file
-    PigScriptMigrationImplementation pigsavedscriptmigration = new PigScriptMigrationImplementation();
-
-    QuerySet huedatabase = null;
-
-    if (view.getProperties().get("huedrivername").contains("mysql")) {
-      huedatabase = new MysqlQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-      huedatabase = new PostgressQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
-      huedatabase = new SqliteQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-      huedatabase = new OracleQuerySet();
-    }
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-    int maxcountforsavequery = 0, maxcountforpigsavedscript;
-    String time = null, timetobeInorder = null;
-    Long epochTime = null;
-    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename = "";
-    int pigInstanceTableName;
-
-    ArrayList<PigModel> dbpojoPigSavedscript = new ArrayList<PigModel>();
-
-    try {
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); // connection to Hue DB
-      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(username, startDate, endDate, connectionHuedb, huedatabase); // fetching pig script details from Hue DB
-
-      for (int j = 0; j < dbpojoPigSavedscript.size(); j++) {
-        logger.info("the query fetched from hue=" + 
dbpojoPigSavedscript.get(j).getScript());
-
-      }
-
-
-      /* If No pig Script has been fetched from Hue db according to our search criteria*/
-      if (dbpojoPigSavedscript.size() == 0) {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
-        getResourceManager(view).update(migrationresult, jobid);
-
-        logger.info("no pig script has been selected from hue according to 
your criteria of searching");
-
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection(); // connecting to Ambari db
-        connectionAmbaridb.setAutoCommit(false);
-        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + 
"times");
-
-        //for each pig script found in Hue Database
-
-        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
-
-
-          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
-          int progressPercentage = Math.round(calc);
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i + 1);
-          migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script:  " + 
dbpojoPigSavedscript.get(i).getTitle());
-
-          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(connectionAmbaridb, instance, ambaridatabase); // finding the table name in ambari from the given instance
-
-          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(connectionAmbaridb, pigInstanceTableName, ambaridatabase) + 1); // maximum count of the primary key of pig script table
-
-          time = pigsavedscriptmigration.getTime();
-
-          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
-
-          epochTime = pigsavedscriptmigration.getEpochTime();
-
-          dirNameForPigScript = "/user/admin/pig/scripts/";
-
-          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
-
-          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
-
-          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
-
-          pigsavedscriptmigration.insertRowForPigScript(completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i, ambaridatabase);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-            pigsavedscriptmigration.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          } else {
-            pigsavedscriptmigration.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          }
-
-          logger.info(dbpojoPigSavedscript.get(i).getTitle() + " Migrated to Ambari");
-
-          pigsavedscriptmigration.deletePigScriptLocalFile(ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("rollback done");
-      } catch (SQLException e1) {
-        logger.error("Sql exception while doing roll back", e);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception", e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: ", e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ", e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection close exception: ", e);
-        }
-    }
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-
-    migrationresult.setJobtype("hivehistoryquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-    logger.info("----------------------------------");
-    logger.info("pig saved script Migration ends");
-    logger.info("----------------------------------");
-
-  }
-
-
-}
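
The utility above picks its QuerySet and QuerySetAmbariDB implementations with repeated contains() ladders over the configured driver names, and silently proceeds with a null query set when no branch matches. A small factory sketch for the Hue side (the helper itself is hypothetical; the concrete classes are the ones the deleted file imports):

    // Hypothetical helper, not from the original: map the Hue JDBC driver
    // name to its QuerySet and fail fast on an unsupported driver.
    static QuerySet hueQuerySetFor(String drivername) {
      if (drivername.contains("mysql"))      return new MysqlQuerySet();
      if (drivername.contains("postgresql")) return new PostgressQuerySet();
      if (drivername.contains("sqlite"))     return new SqliteQuerySet();
      if (drivername.contains("oracle"))     return new OracleQuerySet();
      throw new IllegalArgumentException("Unsupported Hue driver: " + drivername);
    }

    // Usage, replacing the if/else ladder:
    //   QuerySet huedatabase =
    //       hueQuerySetFor(view.getProperties().get("huedrivername"));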

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
deleted file mode 100644
index 31cec2f..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.revertchange;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class RevertChangeStartJob extends Thread{
-
-
-  String instance;
-  String revertdate;
-  String jobid;
-  ViewContext view;
-
-  public RevertChangeStartJob(String instance, String revertdate, String jobid, ViewContext view) {
-
-    this.instance=instance;
-    this.revertdate=revertdate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setProgressPercentage(0);
-
-    JSONObject response = new JSONObject();
-
-
-    RevertChangeUtility revertchange = new RevertChangeUtility();
-    try {
-      revertchange.revertChangeUtility(instance, revertdate, jobid, view, migrationresult);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}
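
RevertChangeStartJob, like the other start jobs in this view, extends Thread and is started once per request. A sketch of launching the same job through a bounded executor instead (the pool and its size are illustrative assumptions, not from the original):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Sketch only: Thread implements Runnable, so the job can be submitted to
    // a shared pool, capping how many revert/migration jobs hit the Hue and
    // Ambari databases at once.
    ExecutorService migrationPool = Executors.newFixedThreadPool(2);
    migrationPool.submit(new RevertChangeStartJob(instance, revertdate, jobid, view));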
