http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
new file mode 100644
index 0000000..624829b
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/Storage.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence;
+
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
/**
 * Object storage interface.
 *
 * Abstracts the persistence backend (local key-value file or the
 * ambari-views Persistence API — see StorageFactory) behind plain
 * CRUD-style operations keyed by bean class and id.
 */
public interface Storage {
  /**
   * Persist object to DB. It should be Indexed
   * @param model bean class the object is stored under
   * @param obj object to save
   * @param <T> bean class
   */
  <T extends Indexed> void store(Class<T> model, Indexed obj);

  /**
   * Load object
   * @param model bean class
   * @param id identifier
   * @param <T> bean class
   * @return bean instance
   * @throws ItemNotFound thrown if item with id was not found in DB
   */
  <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound;

  /**
   * Load all objects of given bean class
   * @param model bean class
   * @param filter filtering strategy (return only those objects that conform condition)
   * @param <T> bean class
   * @return list of filtered objects
   */
  <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter);

  /**
   * Load all objects of given bean class
   * @param model bean class
   * @param <T> bean class
   * @return list of all objects
   */
  <T extends Indexed> List<T> loadAll(Class<T> model);

  /**
   * Delete object
   * NOTE(review): raw {@code Class} — consider {@code Class<? extends Indexed>}
   * once all implementations can be updated together.
   * @param model bean class
   * @param id identifier
   * @throws ItemNotFound thrown if item with id was not found in DB
   */
  void delete(Class model, Object id) throws ItemNotFound;

  /**
   * Check is object exists
   * NOTE(review): raw {@code Class} — same remark as {@link #delete}.
   * @param model bean class
   * @param id identifier
   * @return true if exists
   */
  boolean exists(Class model, Object id);
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
new file mode 100644
index 0000000..be69f82
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ContextConfigurationAdapter.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
import org.apache.ambari.view.ViewContext;
import org.apache.commons.configuration.Configuration;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Properties;
+
+/**
+ * Persistence API to Apache Configuration adapter
+ */
+@Deprecated
+public class ContextConfigurationAdapter implements Configuration {
+  private ViewContext context;
+
+  /**
+   * Constructor of adapter
+   * @param context View Context
+   */
+  public ContextConfigurationAdapter(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public Configuration subset(String prefix) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isEmpty() {
+    return context.getInstanceData().isEmpty();
+  }
+
+  @Override
+  public boolean containsKey(String s) {
+    Map<String, String> data = context.getInstanceData();
+    return data.containsKey(s);
+  }
+
+  @Override
+  public void addProperty(String s, Object o) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void setProperty(String s, Object o) {
+    context.putInstanceData(s, o.toString());
+  }
+
+  @Override
+  public void clearProperty(String key) {
+    context.removeInstanceData(key);
+  }
+
+  @Override
+  public void clear() {
+    for (String key : context.getInstanceData().keySet())
+      context.removeInstanceData(key);
+  }
+
+  @Override
+  public Object getProperty(String key) {
+    return context.getInstanceData(key);
+  }
+
+  @Override
+  public Iterator getKeys(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Iterator getKeys() {
+    return context.getInstanceData().keySet().iterator();
+  }
+
+  @Override
+  public Properties getProperties(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean getBoolean(String s) {
+    return getBoolean(s, null);
+  }
+
+  @Override
+  public boolean getBoolean(String s, boolean b) {
+    return getBoolean(s, (Boolean)b);
+  }
+
+  @Override
+  public Boolean getBoolean(String s, Boolean aBoolean) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Boolean.parseBoolean(data):aBoolean;
+  }
+
+  @Override
+  public byte getByte(String s) {
+    return getByte(s, null);
+  }
+
+  @Override
+  public byte getByte(String s, byte b) {
+    return getByte(s, (Byte)b);
+  }
+
+  @Override
+  public Byte getByte(String s, Byte aByte) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Byte.parseByte(data):aByte;
+  }
+
+  @Override
+  public double getDouble(String s) {
+    return getDouble(s, null);
+  }
+
+  @Override
+  public double getDouble(String s, double v) {
+    return getDouble(s, (Double)v);
+  }
+
+  @Override
+  public Double getDouble(String s, Double aDouble) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Double.parseDouble(data):aDouble;
+  }
+
+  @Override
+  public float getFloat(String s) {
+    return getFloat(s, null);
+  }
+
+  @Override
+  public float getFloat(String s, float v) {
+    return getFloat(s, (Float)v);
+  }
+
+  @Override
+  public Float getFloat(String s, Float aFloat) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Float.parseFloat(data):aFloat;
+  }
+
+  @Override
+  public int getInt(String s) {
+    return getInteger(s, null);
+  }
+
+  @Override
+  public int getInt(String s, int i) {
+    return getInteger(s, i);
+  }
+
+  @Override
+  public Integer getInteger(String s, Integer integer) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Integer.parseInt(data):integer;
+  }
+
+  @Override
+  public long getLong(String s) {
+    return getLong(s, null);
+  }
+
+  @Override
+  public long getLong(String s, long l) {
+    return getLong(s, (Long)l);
+  }
+
+  @Override
+  public Long getLong(String s, Long aLong) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Long.parseLong(data):aLong;
+  }
+
+  @Override
+  public short getShort(String s) {
+    return getShort(s, null);
+  }
+
+  @Override
+  public short getShort(String s, short i) {
+    return getShort(s, (Short)i);
+  }
+
+  @Override
+  public Short getShort(String s, Short aShort) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Short.parseShort(data):aShort;
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s, BigDecimal bigDecimal) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s, BigInteger bigInteger) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public String getString(String s) {
+    return context.getInstanceData(s);
+  }
+
+  @Override
+  public String getString(String s, String s2) {
+    String data = getString(s);
+    return (data != null)?data:s2;
+  }
+
+  @Override
+  public String[] getStringArray(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s, List list) {
+    throw new UnsupportedOperationException();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
new file mode 100644
index 0000000..e55d976
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/FilteringStrategy.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
/**
 * Filtering strategy for stored objects
 */
public interface FilteringStrategy {
  /**
   * Check whether item conforms chosen filter or not
   * @param item item to check
   * @return true if item conforms this filter
   */
  boolean isConform(Indexed item);

  /**
   * Predicate form of this filter for storage backends that can filter
   * on their side (e.g. {@code owner = 'admin'}).
   * NOTE(review): implementations are responsible for escaping any
   * values embedded in the returned clause.
   * @return SQL-style where-clause body
   */
  String whereStatement();
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
new file mode 100644
index 0000000..71d2e55
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Indexed.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
/**
 * Interface to represent item with identifier.
 * Identifiers are plain strings; a null id marks a not-yet-persisted item
 * (storage assigns one on save).
 */
public interface Indexed {
  /**
   * Get the ID
   * @return ID, or null if the item has not been persisted yet
   */
  String getId();

  /**
   * Set ID
   * @param id ID
   */
  void setId(String id);
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
new file mode 100644
index 0000000..cf69677
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/ItemNotFound.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
/**
 * Thrown when item was not found in DB.
 * Checked exception: callers of Storage/resource-manager lookups are
 * expected to handle the missing-item case explicitly.
 */
public class ItemNotFound extends Exception {
  /** Constructs the exception with no detail message. */
  public ItemNotFound() {
  }

  /** @param message detail message */
  public ItemNotFound(String message) {
    super(message);
  }

  /**
   * @param message detail message
   * @param cause underlying cause
   */
  public ItemNotFound(String message, Throwable cause) {
    super(message, cause);
  }

  /** @param cause underlying cause */
  public ItemNotFound(Throwable cause) {
    super(cause);
  }

  /**
   * @param message detail message
   * @param cause underlying cause
   * @param enableSuppression whether suppression is enabled
   * @param writableStackTrace whether the stack trace should be writable
   */
  public ItemNotFound(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
    super(message, cause, enableSuppression, writableStackTrace);
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
new file mode 100644
index 0000000..8f72031
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/OnlyOwnersFilteringStrategy.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+public class OnlyOwnersFilteringStrategy implements FilteringStrategy {
+  private final String username;
+
+  public OnlyOwnersFilteringStrategy(String username) {
+    this.username = username;
+  }
+
+  @Override
+  public boolean isConform(Indexed item) {
+    Owned object = (Owned) item;
+    return object.getOwner().compareTo(username) == 0;
+  }
+
+  @Override
+  public String whereStatement() {
+    return "owner = '" + username + "'";
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
new file mode 100644
index 0000000..56793a5
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/Owned.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
/**
 * Interface to represent item with owner.
 * Used e.g. by OnlyOwnersFilteringStrategy to restrict results to one user.
 */
public interface Owned {
  /**
   * Get the owner
   * @return owner
   */
  String getOwner();

  /**
   * Set owner
   * @param owner owner
   */
  void setOwner(String owner);
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
new file mode 100644
index 0000000..6364b98
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/PersonalResource.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
/**
 * Resource that belongs to a particular user: combines an identifier
 * ({@link Indexed}) with an owner ({@link Owned}).
 */
public interface PersonalResource extends Indexed, Owned {
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
new file mode 100644
index 0000000..1d3ea14
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/utils/StorageFactory.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.persistence.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.LocalKeyValueStorage;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Storage factory, creates storage of Local or Persistence API type.
+ * Type depends on context configuration: if "dataworker.storagePath" is set,
+ * storage of Local type will be created.  Otherwise, Persistence API will be 
used.
+ *
+ * Storage is singleton.
+ */
+public class StorageFactory implements IStorageFactory {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(StorageFactory.class);
+
+  private ViewContext context;
+
+  /**
+   * Constructor of storage factory
+   * @param context View Context instance
+   */
+  public StorageFactory(ViewContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Creates storage instance
+   * @return storage instance
+   */
+  public Storage getStorage() {
+    String fileName = context.getProperties().get("dataworker.storagePath");
+
+    Storage storageInstance;
+    if (fileName != null) {
+      LOG.debug("Using local storage in " + fileName + " to store data");
+      // If specifed, use LocalKeyValueStorage - key-value file based storage
+      storageInstance = new LocalKeyValueStorage(context);
+    } else {
+      LOG.debug("Using Persistence API to store data");
+      // If not specifed, use ambari-views Persistence API
+      storageInstance = new DataStoreStorage(context);
+    }
+    return storageInstance;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
new file mode 100644
index 0000000..41a8ee5
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/CRUDResourceManager.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.Storage;
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+
+import java.util.List;
+
+/**
+ * CRUD resource manager
+ * @param <T> Data type with ID
+ */
+abstract public class CRUDResourceManager<T extends Indexed> implements 
IResourceManager<T> {
+  //TODO: refactor: generic parameter gets Fabric for Indexed objects, not 
objects itself
+  private Storage storage = null;
+
+  protected final Class<? extends T> resourceClass;
+  protected IStorageFactory storageFactory;
+
+  /**
+   * Constructor
+   * @param resourceClass model class
+   */
+  public CRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory 
storageFactory) {
+    this.resourceClass = resourceClass;
+    this.storageFactory = storageFactory;
+  }
+  // CRUD operations
+
+  /**
+   * Create operation
+   * @param object object
+   * @return model object
+   */
+  @Override
+  public T create(T object) {
+    object.setId(null);
+    return this.save(object);
+  }
+
+  /**
+   * Read operation
+   * @param id identifier
+   * @return model object
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public T read(Object id) throws ItemNotFound {
+    T object = null;
+    object = storageFactory.getStorage().load(this.resourceClass, id);
+    if (!checkPermissions(object))
+      throw new ItemNotFound();
+    return object;
+  }
+
+  /**
+   * Read all objects
+   * @param filteringStrategy filtering strategy
+   * @return list of filtered objects
+   */
+  @Override
+  public List<T> readAll(FilteringStrategy filteringStrategy) {
+    return storageFactory.getStorage().loadAll(this.resourceClass, 
filteringStrategy);
+  }
+
+  /**
+   * Update operation
+   * @param newObject new object
+   * @param id identifier of previous object
+   * @return model object
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
+    newObject.setId(id);
+    this.save(newObject);
+    return newObject;
+  }
+
+  /**
+   * Delete operation
+   * @param resourceId object identifier
+   * @throws org.apache.ambari.view.hive20.persistence.utils.ItemNotFound
+   */
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    if (!storageFactory.getStorage().exists(this.resourceClass, resourceId)) {
+      throw new ItemNotFound();
+    }
+    storageFactory.getStorage().delete(this.resourceClass, resourceId);
+  }
+
+  // UTILS
+
+  protected T save(T object) {
+    storageFactory.getStorage().store(resourceClass, object);
+    return object;
+  }
+
+  protected abstract boolean checkPermissions(T object);
+
+  protected void cleanupAfterErrorAndThrowAgain(Indexed object, 
ServiceFormattedException e) {
+    try {
+      delete(object.getId());
+    } catch (ItemNotFound itemNotFound) {
+      throw new ServiceFormattedException("E040 Item not found", itemNotFound);
+    }
+    throw e;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
new file mode 100644
index 0000000..2fd3c53
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/IResourceManager.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
/**
 * CRUD contract for resource managers.
 * @param <T> resource type carrying a string id
 */
public interface IResourceManager<T extends Indexed> {
  /**
   * Persist a new object.
   * @param object object to persist
   * @return persisted object
   */
  T create(T object);

  /**
   * Load an object by id.
   * @param id identifier
   * @return model object
   * @throws ItemNotFound if the object does not exist
   */
  T read(Object id) throws ItemNotFound;

  /**
   * Load all objects matching the given filter.
   * @param filteringStrategy filtering strategy
   * @return list of matching objects
   */
  List<T> readAll(FilteringStrategy filteringStrategy);

  /**
   * Store newObject under the given id.
   * @param newObject replacement object
   * @param id identifier of the object to replace
   * @return updated object
   * @throws ItemNotFound declared for implementations that verify existence
   */
  T update(T newObject, String id) throws ItemNotFound;

  /**
   * Delete the object with the given id.
   * @param resourceId object identifier
   * @throws ItemNotFound if the object does not exist
   */
  void delete(Object resourceId) throws ItemNotFound;
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
new file mode 100644
index 0000000..8d2ab86
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/PersonalCRUDResourceManager.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive20.persistence.utils.PersonalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.Callable;
+
+/**
+ * Resource manager that returns only user owned elements from DB
+ * @param <T> Data type with ID and Owner
+ */
+public class PersonalCRUDResourceManager<T extends PersonalResource> extends 
CRUDResourceManager<T> {
+  protected boolean ignorePermissions = false;
+
+  private final static Logger LOG =
+      LoggerFactory.getLogger(PersonalCRUDResourceManager.class);
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param resourceClass model class
+   */
+  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, 
IStorageFactory storageFabric, ViewContext context) {
+    super(resourceClass, storageFabric);
+    this.context = context;
+  }
+
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
+    T object = storageFactory.getStorage().load(this.resourceClass, id);
+    if (object.getOwner().compareTo(this.context.getUsername()) != 0) {
+      throw new ItemNotFound();
+    }
+
+    newObject.setOwner(this.context.getUsername());
+    return super.update(newObject, id);
+  }
+
+  @Override
+  public T save(T object) {
+    if (!ignorePermissions) {
+      // in threads permissions should be ignored,
+      // because context.getUsername doesn't work. See BUG-27093.
+      object.setOwner(this.context.getUsername());
+    }
+    return super.save(object);
+  }
+
+  @Override
+  protected boolean checkPermissions(T object) {
+    if (ignorePermissions) {
+      return true;
+    }
+    return object.getOwner().compareTo(this.context.getUsername()) == 0;
+  }
+
+  /**
+   * Execute action ignoring objects owner
+   * @param actions callable to execute
+   * @return value returned from actions
+   * @throws Exception
+   */
+  public T ignorePermissions(Callable<T> actions) throws Exception {
+    ignorePermissions = true;
+    T result;
+    try {
+      result = actions.call();
+    } finally {
+      ignorePermissions = false;
+    }
+    return result;
+  }
+
+  protected String getUsername() {
+    return context.getUsername();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
new file mode 100644
index 0000000..ef48d9e
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/SharedCRUDResourceManager.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.persistence.IStorageFactory;
+import org.apache.ambari.view.hive20.persistence.utils.Indexed;
+
+/**
+ * Resource manager that doesn't restrict access (Allow all)
+ * @param <T> Data type with ID
+ */
/**
 * Resource manager that doesn't restrict access (Allow all): every caller
 * may read and modify every stored object.
 *
 * @param <T> data type with ID
 */
public class SharedCRUDResourceManager<T extends Indexed> extends CRUDResourceManager<T> {
  protected ViewContext context;

  /**
   * Constructor.
   *
   * @param responseClass model class
   * @param storageFabric factory providing the backing storage
   */
  public SharedCRUDResourceManager(Class<T> responseClass, IStorageFactory storageFabric) {
    super(responseClass, storageFabric);
  }

  /** Shared resources are visible to all users, so checks always pass. */
  @Override
  protected boolean checkPermissions(T object) {
    return true; //everyone has permission
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
new file mode 100644
index 0000000..30fda79
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/ConnectionService.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.AuthParams;
+import org.apache.ambari.view.hive20.ConnectionFactory;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.internal.ConnectionException;
+import org.apache.ambari.view.hive20.internal.HiveConnectionWrapper;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ * Connection verification and management controller
+ */
+public class ConnectionService {
+
+    public static final String NO_PASSWORD = "";
+    public static final String SUFFIX = "validating the login";
+    @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ConnectionService.class);
+
+  /**
+   * Check if LDAP is configured on Hive
+   * if no password is cached , ask for one(401)
+   * if yes and a password is cached, try
+   * to connect, if connection succeeds
+   * return OK,
+   *
+   * if connection fails - ask for one again(401)
+   */
+  @GET
+  @Path("connect")
+  @Produces(MediaType.APPLICATION_JSON)
+
+  public Response attemptConnection() {
+    boolean ldapEnabled = ConnectionFactory.isLdapEnabled(context);
+    if(ldapEnabled) {
+      ConnectionSystem instance = ConnectionSystem.getInstance();
+      Optional<String> password = instance.getPassword(context);
+      if (!password.isPresent()) {
+        // No password cached - request for one
+        return Response.status(Response.Status.UNAUTHORIZED).build();
+      }
+      // if there was a password cached, make a connection attempt
+      // get the password
+        String pass = password.get();
+      // password may be stale, try to connect to Hive
+        return attemptHiveConnection(pass);
+    }
+      return attemptHiveConnection(NO_PASSWORD);
+
+  }
+
+
+    private Response getOKResponse() {
+        JSONObject response = new JSONObject();
+        response.put("message", "OK");
+        response.put("trace", null);
+        response.put("status", "200");
+        return 
Response.ok().entity(response).type(MediaType.APPLICATION_JSON).build();
+    }
+
+    private Response attemptHiveConnection(String pass) {
+        ConnectionConfig connectionConfig = ConnectionFactory.create(context);
+        HiveConnectionWrapper hiveConnectionWrapper = new 
HiveConnectionWrapper(connectionConfig.getJdbcUrl(), 
connectionConfig.getUsername(), pass,new AuthParams(context));
+        try {
+          hiveConnectionWrapper.connect();
+        } catch (ConnectionException e) {
+          // Cannot connect with the current credentials
+          // check the message to see if the cause was a login failure
+          // return a 401
+          // else return a 500
+          if(isLoginError(e))
+            return Response.status(Response.Status.UNAUTHORIZED).build();
+          else
+              throw new ServiceFormattedException(e.getMessage(), e);
+        } finally {
+          try {
+            hiveConnectionWrapper.disconnect();
+          }
+        catch(ConnectionException e){
+           LOG.warn("Cannot close the connection");
+        }
+      }
+        return getOKResponse()  ;
+    }
+
+    private boolean isLoginError(ConnectionException ce) {
+        return ce.getCause().getMessage().toLowerCase().endsWith(SUFFIX);
+    }
+
+
+    /**
+     * Set password
+     * This just updates the caches.
+     */
+    @POST
+    @Path("auth")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response setupPassword(AuthRequest request) {
+        try {
+            //Cache the password for the user
+            ConnectionSystem instance = ConnectionSystem.getInstance();
+            
instance.persistCredentials(context.getUsername(),request.password);
+            return getOKResponse();
+        } catch (WebApplicationException ex) {
+            throw ex;
+        } catch (Exception ex) {
+            throw new ServiceFormattedException(ex.getMessage(), ex);
+        }
+    }
+
+
+
+    public static class AuthRequest {
+        public String password;
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
new file mode 100644
index 0000000..5a2d389
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -0,0 +1,302 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import com.google.common.base.Function;
+import com.google.common.base.Optional;
+import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
+import com.google.common.collect.FluentIterable;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.actor.DatabaseManager;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.DDLDelegator;
+import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.Row;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseInfo;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
+import org.apache.ambari.view.hive20.internal.dto.TableInfo;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.TableResponse;
+import org.apache.ambari.view.hive20.internal.parsers.TableMetaParserImpl;
+import 
org.apache.ambari.view.hive20.internal.query.generators.AlterTableQueryGenerator;
+import 
org.apache.ambari.view.hive20.internal.query.generators.CreateTableQueryGenerator;
+import 
org.apache.ambari.view.hive20.internal.query.generators.DeleteTableQueryGenerator;
+import org.apache.ambari.view.hive20.resources.jobs.JobServiceInternal;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobController;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobImpl;
+import 
org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.concurrent.duration.Duration;
+
+import javax.annotation.Nullable;
+import javax.inject.Inject;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ *
+ */
+public class DDLProxy {
+  private static final Logger LOG = LoggerFactory.getLogger(DDLProxy.class);
+
+  private final ViewContext context;
+  private final TableMetaParserImpl tableMetaParser;
+
+  @Inject
+  public DDLProxy(ViewContext context, TableMetaParserImpl tableMetaParser) {
+    this.context = context;
+    this.tableMetaParser = tableMetaParser;
+    LOG.info("Creating DDLProxy");
+  }
+
+
+  public Set<DatabaseResponse> getDatabases() {
+    Set<DatabaseInfo> infos = getDatabaseInfos();
+    return transformToDatabasesResponse(infos);
+  }
+
+  public DatabaseResponse getDatabase(final String databaseId) {
+    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
+    if (!infoOptional.isPresent()) {
+      // Throw exception
+    }
+
+    return transformToDatabaseResponse(infoOptional.get());
+  }
+
+  public Set<TableResponse> getTables(final String databaseId) {
+    Optional<DatabaseInfo> infoOptional = selectDatabase(databaseId);
+    if (!infoOptional.isPresent()) {
+      // Throw exception;
+    }
+    DatabaseInfo info = infoOptional.get();
+    return transformToTablesResponse(info.getTables(), info.getName());
+  }
+
+  public TableResponse getTable(final String databaseName, final String 
tableName) {
+    Optional<DatabaseInfo> databaseOptional = selectDatabase(databaseName);
+    if (!databaseOptional.isPresent()) {
+      // Throw exception;
+    }
+    Optional<TableInfo> tableOptional = 
selectTable(databaseOptional.get().getTables(), tableName);
+    if (!tableOptional.isPresent()) {
+      // Throw exception
+    }
+    return transformToTableResponse(tableOptional.get(), databaseName);
+  }
+
+  public TableMeta getTableProperties(ViewContext context, ConnectionConfig 
connectionConfig, String databaseName, String tableName) {
+    DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+    List<Row> createTableStatementRows = 
delegator.getTableCreateStatement(connectionConfig, databaseName, tableName);
+    List<Row> describeFormattedRows = 
delegator.getTableDescriptionFormatted(connectionConfig, databaseName, 
tableName);
+
+    return tableMetaParser.parse(databaseName, tableName, 
createTableStatementRows, describeFormattedRows);
+  }
+
+  private Optional<DatabaseInfo> selectDatabase(final String databaseId) {
+    Set<DatabaseInfo> infos = getDatabaseInfos();
+    return FluentIterable.from(infos).filter(new Predicate<DatabaseInfo>() {
+      @Override
+      public boolean apply(@Nullable DatabaseInfo input) {
+        return input.getName().equalsIgnoreCase(databaseId);
+      }
+    }).first();
+  }
+
+  private Set<DatabaseResponse> transformToDatabasesResponse(Set<DatabaseInfo> 
infos) {
+    return FluentIterable.from(infos).transform(new Function<DatabaseInfo, 
DatabaseResponse>() {
+      @Nullable
+      @Override
+      public DatabaseResponse apply(@Nullable DatabaseInfo input) {
+        DatabaseResponse response = new DatabaseResponse();
+        response.setId(input.getName());
+        response.setName(input.getName());
+        return response;
+      }
+    }).toSet();
+  }
+
+  private DatabaseResponse transformToDatabaseResponse(DatabaseInfo 
databaseInfo) {
+    DatabaseResponse response = new DatabaseResponse();
+    response.setName(databaseInfo.getName());
+    response.setId(databaseInfo.getName());
+    Set<TableResponse> tableResponses = 
transformToTablesResponse(databaseInfo.getTables(), databaseInfo.getName());
+    response.addAllTables(tableResponses);
+    return response;
+  }
+
+  private Set<TableResponse> transformToTablesResponse(final Set<TableInfo> 
tables, final String databaseName) {
+    return FluentIterable.from(tables).transform(new Function<TableInfo, 
TableResponse>() {
+      @Nullable
+      @Override
+      public TableResponse apply(@Nullable TableInfo input) {
+        return transformToTableResponse(input, databaseName);
+      }
+    }).toSet();
+  }
+
+  private TableResponse transformToTableResponse(TableInfo tableInfo, String 
databaseName) {
+    TableResponse response = new TableResponse();
+    response.setId(databaseName + "/" + tableInfo.getName());
+    response.setName(tableInfo.getName());
+    response.setType(tableInfo.getType());
+    response.setDatabaseId(databaseName);
+    return response;
+  }
+
+  private Optional<TableInfo> selectTable(Set<TableInfo> tables, final String 
tableName) {
+    return FluentIterable.from(tables).filter(new Predicate<TableInfo>() {
+      @Override
+      public boolean apply(@Nullable TableInfo input) {
+        return input.getName().equalsIgnoreCase(tableName);
+      }
+    }).first();
+  }
+
+  private Set<DatabaseInfo> getDatabaseInfos() {
+    ActorRef metaDataManager = 
ConnectionSystem.getInstance().getMetaDataManager(context);
+    ActorSystem system = ConnectionSystem.getInstance().getActorSystem();
+
+    Inbox inbox = Inbox.create(system);
+
+    inbox.send(metaDataManager, new 
DatabaseManager.GetDatabases(context.getUsername()));
+    Object receive;
+    try {
+      receive = inbox.receive(Duration.create(60 * 1000, 
TimeUnit.MILLISECONDS));
+    } catch (Throwable ex) {
+      String errorMessage = "Query timed out to fetch databases information 
for user: " + context.getUsername();
+      LOG.error(errorMessage, ex);
+      throw new ServiceFormattedException(errorMessage, ex);
+    }
+    Set<DatabaseInfo> infos = new HashSet<>();
+
+    if (receive instanceof DatabaseManager.DatabasesResult) {
+      infos = ((DatabaseManager.DatabasesResult) receive).getDatabases();
+    }
+    return infos;
+  }
+
+  public String generateCreateTableDDL(String databaseName, TableMeta 
tableMeta) throws ServiceException {
+    if (Strings.isNullOrEmpty(tableMeta.getDatabase())) {
+      tableMeta.setDatabase(databaseName);
+    }
+    Optional<String> createTableQuery = new 
CreateTableQueryGenerator(tableMeta).getQuery();
+    if(createTableQuery.isPresent()) {
+      LOG.info("generated create table query : {}", createTableQuery);
+      return createTableQuery.get();
+    }else {
+      throw new ServiceException("could not generate create table query for 
database : " + databaseName + " table : " + tableMeta.getTable());
+    }
+  }
+
+  public Job createTable(String databaseName, TableMeta tableMeta, 
JobResourceManager resourceManager) throws ServiceException {
+    String createTableQuery = this.generateCreateTableDDL(databaseName, 
tableMeta);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Create table " + tableMeta.getDatabase() + "." + 
tableMeta.getTable());
+    jobInfo.put("forcedContent", createTableQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new 
JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for create table {}", 
returnableJob.getId(), tableMeta.getTable());
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while creating the table for create Query 
: {}", createTableQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public Job deleteTable(String databaseName, String tableName, 
JobResourceManager resourceManager) throws ServiceException {
+    String deleteTableQuery = generateDeleteTableDDL(databaseName, tableName);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Delete table " + databaseName + "." + tableName);
+    jobInfo.put("forcedContent", deleteTableQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new 
JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for the deletion of table : {}", 
returnableJob.getId(), tableName);
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while deleting the table for delete Query 
: {}", deleteTableQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public String generateDeleteTableDDL(String databaseName, String tableName) 
throws ServiceException {
+    Optional<String> deleteTableQuery = new 
DeleteTableQueryGenerator(databaseName, tableName).getQuery();
+    if(deleteTableQuery.isPresent()) {
+      LOG.info("deleting table {} with query {}", databaseName + "." + 
tableName, deleteTableQuery);
+      return deleteTableQuery.get();
+    }else{
+      throw new ServiceException("Failed to generate query for delete table " 
+ databaseName + "." + tableName);
+    }
+  }
+
+  public Job alterTable(ViewContext context, ConnectionConfig 
hiveConnectionConfig, String databaseName, String oldTableName, TableMeta 
newTableMeta, JobResourceManager resourceManager) throws ServiceException {
+    String alterQuery = generateAlterTableQuery(context, hiveConnectionConfig, 
databaseName, oldTableName, newTableMeta);
+    Map jobInfo = new HashMap<>();
+    jobInfo.put("title", "Alter table " + databaseName + "." + oldTableName);
+    jobInfo.put("forcedContent", alterQuery);
+    jobInfo.put("dataBase", databaseName);
+
+    try {
+      Job job = new JobImpl(jobInfo);
+      JobController createdJobController = new 
JobServiceInternal().createJob(job, resourceManager);
+      Job returnableJob = createdJobController.getJobPOJO();
+      LOG.info("returning job with id {} for alter table {}", 
returnableJob.getId(), oldTableName);
+      return returnableJob;
+    } catch (Throwable e) {
+      LOG.error("Exception occurred while creating the table for create Query 
: {}", alterQuery, e);
+      throw new ServiceException(e);
+    }
+  }
+
+  public String generateAlterTableQuery(ViewContext context, ConnectionConfig 
hiveConnectionConfig, String databaseName, String oldTableName, TableMeta 
newTableMeta) throws ServiceException {
+    TableMeta oldTableMeta = this.getTableProperties(context, 
hiveConnectionConfig, databaseName, oldTableName);
+    return generateAlterTableQuery(oldTableMeta, newTableMeta);
+  }
+
+  public String generateAlterTableQuery(TableMeta oldTableMeta, TableMeta 
newTableMeta) throws ServiceException {
+    AlterTableQueryGenerator queryGenerator = new 
AlterTableQueryGenerator(oldTableMeta, newTableMeta);
+    Optional<String> alterQuery = queryGenerator.getQuery();
+    if(alterQuery.isPresent()){
+      return alterQuery.get();
+    }else{
+      throw new ServiceException("Failed to generate alter table query for 
table " + oldTableMeta.getDatabase() + "." + oldTableMeta.getTable());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
new file mode 100644
index 0000000..3d4e7d7
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.exceptions.ServiceException;
+import org.apache.ambari.view.hive20.internal.dto.DatabaseResponse;
+import org.apache.ambari.view.hive20.internal.dto.TableMeta;
+import org.apache.ambari.view.hive20.internal.dto.TableResponse;
+import org.apache.ambari.view.hive20.resources.jobs.viewJobs.Job;
+import 
org.apache.ambari.view.hive20.resources.jobs.viewJobs.JobResourceManager;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive20.utils.SharedObjectsFactory;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.Set;
+
+/**
+ * Resource to get the DDL information for the database
+ */
+public class DDLService extends BaseService {
+
+  private static final String CREATE_TABLE = "create-table";
+  private static final String ALTER_TABLE = "alter-table";
+  private final DDLProxy proxy;
+  private JobResourceManager resourceManager;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(DDLService.class);
+
  /**
   * Lazily creates the job resource manager; synchronized so concurrent
   * requests do not each build their own instance.
   */
  protected synchronized JobResourceManager getResourceManager() {
    if (resourceManager == null) {
      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
      resourceManager = new JobResourceManager(connectionsFactory, context);
    }
    return resourceManager;
  }
+
  /**
   * Constructor.
   *
   * @param proxy proxy performing the actual DDL work
   */
  @Inject
  public DDLService(DDLProxy proxy) {
    this.proxy = proxy;
  }
+
+
  /**
   * Lists all databases.
   *
   * @param like filter expression; NOTE(review): currently accepted but not
   *             applied — all databases are returned. Confirm whether
   *             filtering is expected here.
   * @return 200 with {"databases": [...]}
   */
  @GET
  @Path("databases")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getDatabases(@QueryParam("like") String like) {
    Set<DatabaseResponse> infos = proxy.getDatabases();
    JSONObject response = new JSONObject();
    response.put("databases", infos);
    return Response.ok(response).build();
  }
+
  /**
   * Fetches one database with its tables.
   *
   * @param databaseId database name
   * @return 200 with {"database": {...}}
   */
  @GET
  @Path("databases/{database_id}")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getDatabase(@PathParam("database_id") String databaseId) {
    DatabaseResponse database = proxy.getDatabase(databaseId);
    JSONObject response = new JSONObject();
    response.put("database", database);
    return Response.ok(response).build();
  }
+
+
  /**
   * Lists the tables of a database.
   *
   * @param databaseName database name
   * @return 200 with {"tables": [...]}
   */
  @GET
  @Path("databases/{database_id}/tables")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getTables(@PathParam("database_id") String databaseName) {
    Set<TableResponse> tables = proxy.getTables(databaseName);
    JSONObject response = new JSONObject();
    response.put("tables", tables);
    return Response.ok(response).build();
  }
+
+  @POST
+  @Path("databases/{database_id}/tables")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response createTable(@PathParam("database_id") String databaseName, 
TableMetaRequest request) {
+    try {
+      Job job = proxy.createTable(databaseName, request.tableInfo, 
getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(job).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while creatint table for db {} with 
details : {}", databaseName, request.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @POST
+  @Path("databases/{database_id}/tables/ddl")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response generateDDL(TableMetaRequest request, 
@QueryParam("query_type") String queryType) {
+    try {
+      String query = null;
+      if (queryType.equals(CREATE_TABLE)) {
+        query = proxy.generateCreateTableDDL(request.tableInfo.getDatabase(), 
request.tableInfo);
+      }else if(queryType.equals(ALTER_TABLE)){
+        query = proxy.generateAlterTableQuery(context, 
getHiveConnectionConfig(), request.tableInfo.getDatabase(), 
request.tableInfo.getTable(), request.tableInfo);
+      }else{
+        throw new ServiceException("query_type = '" + queryType + "' is not 
supported");
+      }
+      JSONObject response = new JSONObject();
+      response.put("ddl", new DDL(query));
+      return 
Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while generating {} ddl for : {}", 
queryType, request.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response getTable(@PathParam("database_id") String databaseName, 
@PathParam("table_id") String tableName) {
+    TableResponse table = proxy.getTable(databaseName, tableName);
+    JSONObject response = new JSONObject();
+    response.put("table", table);
+    return Response.ok(response).build();
+  }
+
+  /**
+   *
+   * @param databaseName
+   * @param oldTableName : this is required in case if the name of table 
itself is changed in tableMeta
+   * @param tableMetaRequest
+   * @return
+   */
+  @PUT
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response alterTable(@PathParam("database_id") String databaseName, 
@PathParam("table_id") String oldTableName, TableMetaRequest tableMetaRequest) {
+    try {
+      ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+      Job job = proxy.alterTable(context, hiveConnectionConfig, databaseName, 
oldTableName, tableMetaRequest.tableInfo, getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return Response.status(Response.Status.ACCEPTED).entity(job).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while creatint table for db {} with 
details : {}", databaseName, tableMetaRequest.tableInfo, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @DELETE
+  @Path("databases/{database_id}/tables/{table_id}")
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response deleteTable(@PathParam("database_id") String databaseName, 
@PathParam("table_id") String tableName) {
+    try {
+      Job job = proxy.deleteTable(databaseName, tableName, 
getResourceManager());
+      JSONObject response = new JSONObject();
+      response.put("job", job);
+      return 
Response.status(Response.Status.ACCEPTED).entity(response).build();
+    } catch (ServiceException e) {
+      LOG.error("Exception occurred while deleting table for db {}, tableName 
: {}", databaseName, tableName, e);
+      throw new ServiceFormattedException(e);
+    }
+  }
+
+  @GET
+  @Path("databases/{database_id}/tables/{table_id}/info")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getTableInfo(@PathParam("database_id") String databaseName, 
@PathParam("table_id") String tableName) {
+    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+    TableMeta meta = proxy.getTableProperties(context, hiveConnectionConfig, 
databaseName, tableName);
+    JSONObject response = new JSONObject();
+    response.put("tableInfo", meta);
+    return Response.ok(response).build();
+  }
+
+  public static class DDL {
+    String query;
+
+    public DDL(String query) {
+      this.query = query;
+    }
+  }
+
+  /**
+   * Wrapper class for table meta request
+   */
+  public static class TableMetaRequest {
+    public TableMeta tableInfo;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
new file mode 100644
index 0000000..96e9554
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.commons.hdfs.FileOperationService;
+import org.apache.ambari.view.hive20.BaseService;
+
+import javax.ws.rs.Path;
+
+/**
+ *
+ */
+public class FileService extends BaseService {
+
+  @Path("/ops")
+  public FileOperationService fileOps() {
+    return new FileOperationService(context);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
new file mode 100644
index 0000000..274ea20
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/HiveBrowserService.java
@@ -0,0 +1,259 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.browser;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.hive20.BaseService;
+import org.apache.ambari.view.hive20.ConnectionSystem;
+import org.apache.ambari.view.hive20.client.ColumnDescription;
+import org.apache.ambari.view.hive20.client.ConnectionConfig;
+import org.apache.ambari.view.hive20.client.Cursor;
+import org.apache.ambari.view.hive20.client.DDLDelegator;
+import org.apache.ambari.view.hive20.client.DDLDelegatorImpl;
+import org.apache.ambari.view.hive20.client.Row;
+import 
org.apache.ambari.view.hive20.resources.jobs.ResultsPaginationController;
+import org.apache.ambari.view.hive20.utils.BadRequestFormattedException;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * Database access resource
+ */
+public class HiveBrowserService extends BaseService {
+  @Inject
+  ViewResourceHandler handler;
+  @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(HiveBrowserService.class);
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response databases(@QueryParam("like") String like,
+                            @QueryParam("first") String fromBeginning,
+                            @QueryParam("count") Integer count,
+                            @QueryParam("columns") final String 
requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    JSONObject response = new JSONObject();
+    ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
+    DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+    List<String> databases = delegator.getDbList(hiveConnectionConfig, like);
+    response.put("databases", databases);
+
+    return Response.ok(response).build();
+
+  }
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response databasesPaginated(@QueryParam("like") String like,
+                                     @QueryParam("first") String fromBeginning,
+                                     @QueryParam("count") Integer count,
+                                     @QueryParam("searchId") String searchId,
+                                     @QueryParam("format") String format,
+                                     @QueryParam("columns") final String 
requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    String curl = null;
+    try {
+      final String finalLike = like;
+      final DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+      return ResultsPaginationController.getInstance(context)
+          .request("databases", searchId, false, fromBeginning, count, format, 
requestedColumns,
+            new Callable<Cursor<Row, ColumnDescription>>() {
+              @Override
+              public Cursor<Row, ColumnDescription> call() throws Exception {
+                return delegator.getDbListCursor(getHiveConnectionConfig(), 
finalLike);
+              }
+            }).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database/{db}/table")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response tablesInDatabase(@PathParam("db") String db,
+                                   @QueryParam("like") String like,
+                                   @QueryParam("first") String fromBeginning,
+                                   @QueryParam("count") Integer count,
+                                   @QueryParam("columns") final String 
requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+
+    JSONObject response = new JSONObject();
+    DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+    List<String> tables = delegator.getTableList(getHiveConnectionConfig(), 
db, like);
+    response.put("tables", tables);
+    response.put("database", db);
+    return Response.ok(response).build();
+
+  }
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database/{db}/table.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response tablesInDatabasePaginated(@PathParam("db") final String db,
+                                            @QueryParam("like") String like,
+                                            @QueryParam("first") String 
fromBeginning,
+                                            @QueryParam("count") Integer count,
+                                            @QueryParam("searchId") String 
searchId,
+                                            @QueryParam("format") String 
format,
+                                            @QueryParam("columns") final 
String requestedColumns) {
+    if (like == null)
+      like = "*";
+    else
+      like = "*" + like + "*";
+    String curl = null;
+    try {
+      final String finalLike = like;
+      final DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+      try {
+        return ResultsPaginationController.getInstance(context)
+          .request(db + ":tables:", searchId, false, fromBeginning, count, 
format, requestedColumns,
+            new Callable<Cursor<Row, ColumnDescription>>() {
+              @Override
+              public Cursor<Row, ColumnDescription> call() throws Exception {
+                return delegator.getTableListCursor(getHiveConnectionConfig(), 
db, finalLike);
+              }
+            }).build();
+      } catch (Exception ex) {
+        throw new ServiceFormattedException(ex.getMessage(), ex);
+      }
+
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database/{db}/table/{table}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response describeTable(@PathParam("db") String db,
+                                @PathParam("table") String table,
+                                @QueryParam("like") String like,
+                                @QueryParam("columns") String requestedColumns,
+                                @QueryParam("extended") String extended) {
+    boolean extendedTableDescription = (extended != null && 
extended.equals("true"));
+    String curl = null;
+    try {
+      JSONObject response = new JSONObject();
+      DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+      List<ColumnDescription> descriptions = 
delegator.getTableDescription(getHiveConnectionConfig(), db, table, "%", 
extendedTableDescription);
+      response.put("columns", descriptions);
+      response.put("database", db);
+      response.put("table", table);
+
+      //TODO: New implementation
+
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (IllegalArgumentException ex) {
+      throw new BadRequestFormattedException(ex.getMessage(), ex);
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex, curl);
+    }
+  }
+
+  /**
+   * Returns list of databases
+   */
+  @GET
+  @Path("database/{db}/table/{table}.page")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response describeTablePaginated(@PathParam("db") final String db,
+                                         @PathParam("table") final String 
table,
+                                         @QueryParam("like") String like,
+                                         @QueryParam("first") String 
fromBeginning,
+                                         @QueryParam("searchId") String 
searchId,
+                                         @QueryParam("count") Integer count,
+                                         @QueryParam("format") String format,
+                                         @QueryParam("columns") final String 
requestedColumns) {
+    if (like == null)
+      like = ".*";
+    else
+      like = ".*" + like + ".*";
+    final String finalLike = like;
+
+    final DDLDelegator delegator = new DDLDelegatorImpl(context, 
ConnectionSystem.getInstance().getActorSystem(), 
ConnectionSystem.getInstance().getOperationController(context));
+    try {
+      return ResultsPaginationController.getInstance(context)
+        .request(db + ":tables:" + table + ":columns", searchId, false, 
fromBeginning, count, format, requestedColumns,
+          new Callable<Cursor<Row, ColumnDescription>>() {
+            @Override
+            public Cursor<Row, ColumnDescription> call() throws Exception {
+              return 
delegator.getTableDescriptionCursor(getHiveConnectionConfig(), db, table, 
finalLike, false);
+            }
+          }).build();
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
new file mode 100644
index 0000000..d8bf51f
--- /dev/null
+++ 
b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileResource.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.files;
+
+/**
+ * File bean
+ */
+public class FileResource {
+  private String filePath;
+  private String fileContent;
+  private boolean hasNext;
+  private long page;
+  private long pageCount;
+
+  public String getFilePath() {
+    return filePath;
+  }
+
+  public void setFilePath(String filePath) {
+    this.filePath = filePath;
+  }
+
+  public String getFileContent() {
+    return fileContent;
+  }
+
+  public void setFileContent(String fileContent) {
+    this.fileContent = fileContent;
+  }
+
+  public boolean isHasNext() {
+    return hasNext;
+  }
+
+  public void setHasNext(boolean hasNext) {
+    this.hasNext = hasNext;
+  }
+
+  public long getPage() {
+    return page;
+  }
+
+  public void setPage(long page) {
+    this.page = page;
+  }
+
+  public long getPageCount() {
+    return pageCount;
+  }
+
+  public void setPageCount(long pageCount) {
+    this.pageCount = pageCount;
+  }
+}
\ No newline at end of file

Reply via email to