http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
new file mode 100644
index 0000000..0ef41a6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
@@ -0,0 +1,1042 @@
+{
+  "HDP": {
+    "stack-select": {
+      "ACCUMULO": {
+        "ACCUMULO_CLIENT": {
+          "STACK-SELECT-PACKAGE": "accumulo-client",
+          "INSTALL": [
+            "accumulo-client"
+          ],
+          "PATCH": [
+            "accumulo-client"
+          ],
+          "STANDARD": [
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_GC": {
+          "STACK-SELECT-PACKAGE": "accumulo-gc",
+          "INSTALL": [
+            "accumulo-gc"
+          ],
+          "PATCH": [
+            "accumulo-gc"
+          ],
+          "STANDARD": [
+            "accumulo-gc",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MASTER": {
+          "STACK-SELECT-PACKAGE": "accumulo-master",
+          "INSTALL": [
+            "accumulo-master"
+          ],
+          "PATCH": [
+            "accumulo-master"
+          ],
+          "STANDARD": [
+            "accumulo-master",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MONITOR": {
+          "STACK-SELECT-PACKAGE": "accumulo-monitor",
+          "INSTALL": [
+            "accumulo-monitor"
+          ],
+          "PATCH": [
+            "accumulo-monitor"
+          ],
+          "STANDARD": [
+            "accumulo-monitor",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TRACER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tracer",
+          "INSTALL": [
+            "accumulo-tracer"
+          ],
+          "PATCH": [
+            "accumulo-tracer"
+          ],
+          "STANDARD": [
+            "accumulo-tracer",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TSERVER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tablet",
+          "INSTALL": [
+            "accumulo-tablet"
+          ],
+          "PATCH": [
+            "accumulo-tablet"
+          ],
+          "STANDARD": [
+            "accumulo-tablet",
+            "accumulo-client"
+          ]
+        }
+      },
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "DRUID": {
+        "DRUID_COORDINATOR": {
+          "STACK-SELECT-PACKAGE": "druid-coordinator",
+          "INSTALL": [
+            "druid-coordinator"
+          ],
+          "PATCH": [
+            "druid-coordinator"
+          ],
+          "STANDARD": [
+            "druid-coordinator"
+          ]
+        },
+        "DRUID_OVERLORD": {
+          "STACK-SELECT-PACKAGE": "druid-overlord",
+          "INSTALL": [
+            "druid-overlord"
+          ],
+          "PATCH": [
+            "druid-overlord"
+          ],
+          "STANDARD": [
+            "druid-overlord"
+          ]
+        },
+        "DRUID_HISTORICAL": {
+          "STACK-SELECT-PACKAGE": "druid-historical",
+          "INSTALL": [
+            "druid-historical"
+          ],
+          "PATCH": [
+            "druid-historical"
+          ],
+          "STANDARD": [
+            "druid-historical"
+          ]
+        },
+        "DRUID_BROKER": {
+          "STACK-SELECT-PACKAGE": "druid-broker",
+          "INSTALL": [
+            "druid-broker"
+          ],
+          "PATCH": [
+            "druid-broker"
+          ],
+          "STANDARD": [
+            "druid-broker"
+          ]
+        },
+        "DRUID_MIDDLEMANAGER": {
+          "STACK-SELECT-PACKAGE": "druid-middlemanager",
+          "INSTALL": [
+            "druid-middlemanager"
+          ],
+          "PATCH": [
+            "druid-middlemanager"
+          ],
+          "STANDARD": [
+            "druid-middlemanager"
+          ]
+        },
+        "DRUID_ROUTER": {
+          "STACK-SELECT-PACKAGE": "druid-router",
+          "INSTALL": [
+            "druid-router"
+          ],
+          "PATCH": [
+            "druid-router"
+          ],
+          "STANDARD": [
+            "druid-router"
+          ]
+        },
+        "DRUID_SUPERSET": {
+          "STACK-SELECT-PACKAGE": "druid-superset",
+          "INSTALL": [
+            "druid-superset"
+          ],
+          "PATCH": [
+            "druid-superset"
+          ],
+          "STANDARD": [
+            "druid-superset"
+          ]
+        }
+      },
+      "FALCON": {
+        "FALCON_CLIENT": {
+          "STACK-SELECT-PACKAGE": "falcon-client",
+          "INSTALL": [
+            "falcon-client"
+          ],
+          "PATCH": [
+            "falcon-client"
+          ],
+          "STANDARD": [
+            "falcon-client"
+          ]
+        },
+        "FALCON_SERVER": {
+          "STACK-SELECT-PACKAGE": "falcon-server",
+          "INSTALL": [
+            "falcon-server"
+          ],
+          "PATCH": [
+            "falcon-server"
+          ],
+          "STANDARD": [
+            "falcon-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "phoenix-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAHOUT": {
+        "MAHOUT": {
+          "STACK-SELECT-PACKAGE": "mahout-client",
+          "INSTALL": [
+            "mahout-client"
+          ],
+          "PATCH": [
+            "mahout-client"
+          ],
+          "STANDARD": [
+            "mahout-client"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    },
+    "conf-select": {
+      "accumulo": [
+        {
+          "conf_dir": "/etc/accumulo/conf",
+          "current_dir": "{0}/current/accumulo-client/conf"
+        }
+      ],
+      "atlas": [
+        {
+          "conf_dir": "/etc/atlas/conf",
+          "current_dir": "{0}/current/atlas-client/conf"
+        }
+      ],
+      "druid": [
+        {
+          "conf_dir": "/etc/druid/conf",
+          "current_dir": "{0}/current/druid-overlord/conf"
+        }
+      ],
+      "falcon": [
+        {
+          "conf_dir": "/etc/falcon/conf",
+          "current_dir": "{0}/current/falcon-client/conf"
+        }
+      ],
+      "flume": [
+        {
+          "conf_dir": "/etc/flume/conf",
+          "current_dir": "{0}/current/flume-server/conf"
+        }
+      ],
+      "hadoop": [
+        {
+          "conf_dir": "/etc/hadoop/conf",
+          "current_dir": "{0}/current/hadoop-client/conf"
+        }
+      ],
+      "hbase": [
+        {
+          "conf_dir": "/etc/hbase/conf",
+          "current_dir": "{0}/current/hbase-client/conf"
+        }
+      ],
+      "hive": [
+        {
+          "conf_dir": "/etc/hive/conf",
+          "current_dir": "{0}/current/hive-client/conf"
+        }
+      ],
+      "hive2": [
+        {
+          "conf_dir": "/etc/hive2/conf",
+          "current_dir": "{0}/current/hive-server2-hive2/conf"
+        }
+      ],
+      "hive-hcatalog": [
+        {
+          "conf_dir": "/etc/hive-webhcat/conf",
+          "prefix": "/etc/hive-webhcat",
+          "current_dir": "{0}/current/hive-webhcat/etc/webhcat"
+        },
+        {
+          "conf_dir": "/etc/hive-hcatalog/conf",
+          "prefix": "/etc/hive-hcatalog",
+          "current_dir": "{0}/current/hive-webhcat/etc/hcatalog"
+        }
+      ],
+      "kafka": [
+        {
+          "conf_dir": "/etc/kafka/conf",
+          "current_dir": "{0}/current/kafka-broker/conf"
+        }
+      ],
+      "knox": [
+        {
+          "conf_dir": "/etc/knox/conf",
+          "current_dir": "{0}/current/knox-server/conf"
+        }
+      ],
+      "mahout": [
+        {
+          "conf_dir": "/etc/mahout/conf",
+          "current_dir": "{0}/current/mahout-client/conf"
+        }
+      ],
+      "nifi": [
+        {
+          "conf_dir": "/etc/nifi/conf",
+          "current_dir": "{0}/current/nifi/conf"
+        }
+      ],
+      "oozie": [
+        {
+          "conf_dir": "/etc/oozie/conf",
+          "current_dir": "{0}/current/oozie-client/conf"
+        }
+      ],
+      "phoenix": [
+        {
+          "conf_dir": "/etc/phoenix/conf",
+          "current_dir": "{0}/current/phoenix-client/conf"
+        }
+      ],
+      "pig": [
+        {
+          "conf_dir": "/etc/pig/conf",
+          "current_dir": "{0}/current/pig-client/conf"
+        }
+      ],
+      "ranger-admin": [
+        {
+          "conf_dir": "/etc/ranger/admin/conf",
+          "current_dir": "{0}/current/ranger-admin/conf"
+        }
+      ],
+      "ranger-kms": [
+        {
+          "conf_dir": "/etc/ranger/kms/conf",
+          "current_dir": "{0}/current/ranger-kms/conf"
+        }
+      ],
+      "ranger-tagsync": [
+        {
+          "conf_dir": "/etc/ranger/tagsync/conf",
+          "current_dir": "{0}/current/ranger-tagsync/conf"
+        }
+      ],
+      "ranger-usersync": [
+        {
+          "conf_dir": "/etc/ranger/usersync/conf",
+          "current_dir": "{0}/current/ranger-usersync/conf"
+        }
+      ],
+      "slider": [
+        {
+          "conf_dir": "/etc/slider/conf",
+          "current_dir": "{0}/current/slider-client/conf"
+        }
+      ],
+      "spark": [
+        {
+          "conf_dir": "/etc/spark/conf",
+          "current_dir": "{0}/current/spark-client/conf"
+        }
+      ],
+      "spark2": [
+        {
+          "conf_dir": "/etc/spark2/conf",
+          "current_dir": "{0}/current/spark2-client/conf"
+        }
+      ],
+      "sqoop": [
+        {
+          "conf_dir": "/etc/sqoop/conf",
+          "current_dir": "{0}/current/sqoop-client/conf"
+        }
+      ],
+      "storm": [
+        {
+          "conf_dir": "/etc/storm/conf",
+          "current_dir": "{0}/current/storm-client/conf"
+        }
+      ],
+      "storm-slider-client": [
+        {
+          "conf_dir": "/etc/storm-slider-client/conf",
+          "current_dir": "{0}/current/storm-slider-client/conf"
+        }
+      ],
+      "superset": [
+        {
+          "conf_dir": "/etc/druid-superset/conf",
+          "current_dir": "{0}/current/druid-superset/conf"
+        }
+      ],
+      "tez": [
+        {
+          "conf_dir": "/etc/tez/conf",
+          "current_dir": "{0}/current/tez-client/conf"
+        }
+      ],
+      "zeppelin": [
+        {
+          "conf_dir": "/etc/zeppelin/conf",
+          "current_dir": "{0}/current/zeppelin-server/conf"
+        }
+      ],
+      "zookeeper": [
+        {
+          "conf_dir": "/etc/zookeeper/conf",
+          "current_dir": "{0}/current/zookeeper-client/conf"
+        }
+      ]
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_select_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_select_packages.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_select_packages.json
deleted file mode 100644
index 0ad2626..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_select_packages.json
+++ /dev/null
@@ -1,848 +0,0 @@
-{
-  "HDP": {
-    "stack-select": {
-      "ACCUMULO": {
-        "ACCUMULO_CLIENT": {
-          "STACK-SELECT-PACKAGE": "accumulo-client",
-          "INSTALL": [
-            "accumulo-client"
-          ],
-          "PATCH": [
-            "accumulo-client"
-          ],
-          "STANDARD": [
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_GC": {
-          "STACK-SELECT-PACKAGE": "accumulo-gc",
-          "INSTALL": [
-            "accumulo-gc"
-          ],
-          "PATCH": [
-            "accumulo-gc"
-          ],
-          "STANDARD": [
-            "accumulo-gc",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_MASTER": {
-          "STACK-SELECT-PACKAGE": "accumulo-master",
-          "INSTALL": [
-            "accumulo-master"
-          ],
-          "PATCH": [
-            "accumulo-master"
-          ],
-          "STANDARD": [
-            "accumulo-master",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_MONITOR": {
-          "STACK-SELECT-PACKAGE": "accumulo-monitor",
-          "INSTALL": [
-            "accumulo-monitor"
-          ],
-          "PATCH": [
-            "accumulo-monitor"
-          ],
-          "STANDARD": [
-            "accumulo-monitor",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_TRACER": {
-          "STACK-SELECT-PACKAGE": "accumulo-tracer",
-          "INSTALL": [
-            "accumulo-tracer"
-          ],
-          "PATCH": [
-            "accumulo-tracer"
-          ],
-          "STANDARD": [
-            "accumulo-tracer",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_TSERVER": {
-          "STACK-SELECT-PACKAGE": "accumulo-tablet",
-          "INSTALL": [
-            "accumulo-tablet"
-          ],
-          "PATCH": [
-            "accumulo-tablet"
-          ],
-          "STANDARD": [
-            "accumulo-tablet",
-            "accumulo-client"
-          ]
-        }
-      },
-      "ATLAS": {
-        "ATLAS_CLIENT": {
-          "STACK-SELECT-PACKAGE": "atlas-client",
-          "INSTALL": [
-            "atlas-client"
-          ],
-          "PATCH": [
-            "atlas-client"
-          ],
-          "STANDARD": [
-            "atlas-client"
-          ]
-        },
-        "ATLAS_SERVER": {
-          "STACK-SELECT-PACKAGE": "atlas-server",
-          "INSTALL": [
-            "atlas-server"
-          ],
-          "PATCH": [
-            "atlas-server"
-          ],
-          "STANDARD": [
-            "atlas-server"
-          ]
-        }
-      },
-      "DRUID": {
-        "DRUID_COORDINATOR": {
-          "STACK-SELECT-PACKAGE": "druid-coordinator",
-          "INSTALL": [
-            "druid-coordinator"
-          ],
-          "PATCH": [
-            "druid-coordinator"
-          ],
-          "STANDARD": [
-            "druid-coordinator"
-          ]
-        },
-        "DRUID_OVERLORD": {
-          "STACK-SELECT-PACKAGE": "druid-overlord",
-          "INSTALL": [
-            "druid-overlord"
-          ],
-          "PATCH": [
-            "druid-overlord"
-          ],
-          "STANDARD": [
-            "druid-overlord"
-          ]
-        },
-        "DRUID_HISTORICAL": {
-          "STACK-SELECT-PACKAGE": "druid-historical",
-          "INSTALL": [
-            "druid-historical"
-          ],
-          "PATCH": [
-            "druid-historical"
-          ],
-          "STANDARD": [
-            "druid-historical"
-          ]
-        },
-        "DRUID_BROKER": {
-          "STACK-SELECT-PACKAGE": "druid-broker",
-          "INSTALL": [
-            "druid-broker"
-          ],
-          "PATCH": [
-            "druid-broker"
-          ],
-          "STANDARD": [
-            "druid-broker"
-          ]
-        },
-        "DRUID_MIDDLEMANAGER": {
-          "STACK-SELECT-PACKAGE": "druid-middlemanager",
-          "INSTALL": [
-            "druid-middlemanager"
-          ],
-          "PATCH": [
-            "druid-middlemanager"
-          ],
-          "STANDARD": [
-            "druid-middlemanager"
-          ]
-        },
-        "DRUID_ROUTER": {
-          "STACK-SELECT-PACKAGE": "druid-router",
-          "INSTALL": [
-            "druid-router"
-          ],
-          "PATCH": [
-            "druid-router"
-          ],
-          "STANDARD": [
-            "druid-router"
-          ]
-        },
-        "DRUID_SUPERSET": {
-          "STACK-SELECT-PACKAGE": "druid-superset",
-          "INSTALL": [
-            "druid-superset"
-          ],
-          "PATCH": [
-            "druid-superset"
-          ],
-          "STANDARD": [
-            "druid-superset"
-          ]
-        }
-      },
-      "FALCON": {
-        "FALCON_CLIENT": {
-          "STACK-SELECT-PACKAGE": "falcon-client",
-          "INSTALL": [
-            "falcon-client"
-          ],
-          "PATCH": [
-            "falcon-client"
-          ],
-          "STANDARD": [
-            "falcon-client"
-          ]
-        },
-        "FALCON_SERVER": {
-          "STACK-SELECT-PACKAGE": "falcon-server",
-          "INSTALL": [
-            "falcon-server"
-          ],
-          "PATCH": [
-            "falcon-server"
-          ],
-          "STANDARD": [
-            "falcon-server"
-          ]
-        }
-      },
-      "FLUME": {
-        "FLUME_HANDLER": {
-          "STACK-SELECT-PACKAGE": "flume-server",
-          "INSTALL": [
-            "flume-server"
-          ],
-          "PATCH": [
-            "flume-server"
-          ],
-          "STANDARD": [
-            "flume-server"
-          ]
-        }
-      },
-      "HBASE": {
-        "HBASE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hbase-client",
-          "INSTALL": [
-            "hbase-client"
-          ],
-          "PATCH": [
-            "hbase-client"
-          ],
-          "STANDARD": [
-            "hbase-client",
-            "phoenix-client",
-            "hadoop-client"
-          ]
-        },
-        "HBASE_MASTER": {
-          "STACK-SELECT-PACKAGE": "hbase-master",
-          "INSTALL": [
-            "hbase-master"
-          ],
-          "PATCH": [
-            "hbase-master"
-          ],
-          "STANDARD": [
-            "hbase-master"
-          ]
-        },
-        "HBASE_REGIONSERVER": {
-          "STACK-SELECT-PACKAGE": "hbase-regionserver",
-          "INSTALL": [
-            "hbase-regionserver"
-          ],
-          "PATCH": [
-            "hbase-regionserver"
-          ],
-          "STANDARD": [
-            "hbase-regionserver"
-          ]
-        },
-        "PHOENIX_QUERY_SERVER": {
-          "STACK-SELECT-PACKAGE": "phoenix-server",
-          "INSTALL": [
-            "phoenix-server"
-          ],
-          "PATCH": [
-            "phoenix-server"
-          ],
-          "STANDARD": [
-            "phoenix-server"
-          ]
-        }
-      },
-      "HDFS": {
-        "DATANODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
-          "INSTALL": [
-            "hadoop-hdfs-datanode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-datanode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-datanode"
-          ]
-        },
-        "HDFS_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        },
-        "NAMENODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
-          "INSTALL": [
-            "hadoop-hdfs-namenode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-namenode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-namenode"
-          ]
-        },
-        "NFS_GATEWAY": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
-          "INSTALL": [
-            "hadoop-hdfs-nfs3"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-nfs3"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-nfs3"
-          ]
-        },
-        "JOURNALNODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
-          "INSTALL": [
-            "hadoop-hdfs-journalnode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-journalnode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-journalnode"
-          ]
-        },
-        "SECONDARY_NAMENODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
-          "INSTALL": [
-            "hadoop-hdfs-secondarynamenode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-secondarynamenode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-secondarynamenode"
-          ]
-        },
-        "ZKFC": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
-          "INSTALL": [
-            "hadoop-hdfs-zkfc"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-zkfc"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-zkfc"
-          ]
-        }
-      },
-      "HIVE": {
-        "HCAT": {
-          "STACK-SELECT-PACKAGE": "hive-webhcat",
-          "INSTALL": [
-            "hive-webhcat"
-          ],
-          "PATCH": [
-            "hive-webhcat"
-          ],
-          "STANDARD": [
-            "hive-webhcat"
-          ]
-        },
-        "HIVE_METASTORE": {
-          "STACK-SELECT-PACKAGE": "hive-metastore",
-          "INSTALL": [
-            "hive-metastore"
-          ],
-          "PATCH": [
-            "hive-metastore"
-          ],
-          "STANDARD": [
-            "hive-metastore"
-          ]
-        },
-        "HIVE_SERVER": {
-          "STACK-SELECT-PACKAGE": "hive-server2",
-          "INSTALL": [
-            "hive-server2"
-          ],
-          "PATCH": [
-            "hive-server2"
-          ],
-          "STANDARD": [
-            "hive-server2"
-          ]
-        },
-        "HIVE_SERVER_INTERACTIVE": {
-          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
-          "INSTALL": [
-            "hive-server2-hive2"
-          ],
-          "PATCH": [
-            "hive-server2-hive2"
-          ],
-          "STANDARD": [
-            "hive-server2-hive2"
-          ]
-        },
-        "HIVE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        },
-        "WEBHCAT_SERVER": {
-          "STACK-SELECT-PACKAGE": "hive-webhcat",
-          "INSTALL": [
-            "hive-webhcat"
-          ],
-          "PATCH": [
-            "hive-webhcat"
-          ],
-          "STANDARD": [
-            "hive-webhcat"
-          ]
-        }
-      },
-      "KAFKA": {
-        "KAFKA_BROKER": {
-          "STACK-SELECT-PACKAGE": "kafka-broker",
-          "INSTALL": [
-            "kafka-broker"
-          ],
-          "PATCH": [
-            "kafka-broker"
-          ],
-          "STANDARD": [
-            "kafka-broker"
-          ]
-        }
-      },
-      "KNOX": {
-        "KNOX_GATEWAY": {
-          "STACK-SELECT-PACKAGE": "knox-server",
-          "INSTALL": [
-            "knox-server"
-          ],
-          "PATCH": [
-            "knox-server"
-          ],
-          "STANDARD": [
-            "knox-server"
-          ]
-        }
-      },
-      "MAHOUT": {
-        "MAHOUT": {
-          "STACK-SELECT-PACKAGE": "mahout-client",
-          "INSTALL": [
-            "mahout-client"
-          ],
-          "PATCH": [
-            "mahout-client"
-          ],
-          "STANDARD": [
-            "mahout-client"
-          ]
-        }
-      },
-      "MAPREDUCE2": {
-        "HISTORYSERVER": {
-          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
-          "INSTALL": [
-            "hadoop-mapreduce-historyserver"
-          ],
-          "PATCH": [
-            "hadoop-mapreduce-historyserver"
-          ],
-          "STANDARD": [
-            "hadoop-mapreduce-historyserver"
-          ]
-        },
-        "MAPREDUCE2_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "hadoop-mapreduce-INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "OOZIE": {
-        "OOZIE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "oozie-client",
-          "INSTALL": [
-            "oozie-client"
-          ],
-          "PATCH": [
-            "oozie-client"
-          ],
-          "STANDARD": [
-            "oozie-client"
-          ]
-        },
-        "OOZIE_SERVER": {
-          "STACK-SELECT-PACKAGE": "oozie-server",
-          "INSTALL": [
-            "oozie-client",
-            "oozie-server"
-          ],
-          "PATCH": [
-            "oozie-server"
-          ],
-          "STANDARD": [
-            "oozie-client",
-            "oozie-server"
-          ]
-        }
-      },
-      "PIG": {
-        "PIG": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "RANGER": {
-        "RANGER_ADMIN": {
-          "STACK-SELECT-PACKAGE": "ranger-admin",
-          "INSTALL": [
-            "ranger-admin"
-          ],
-          "PATCH": [
-            "ranger-admin"
-          ],
-          "STANDARD": [
-            "ranger-admin"
-          ]
-        },
-        "RANGER_TAGSYNC": {
-          "STACK-SELECT-PACKAGE": "ranger-tagsync",
-          "INSTALL": [
-            "ranger-tagsync"
-          ],
-          "PATCH": [
-            "ranger-tagsync"
-          ],
-          "STANDARD": [
-            "ranger-tagsync"
-          ]
-        },
-        "RANGER_USERSYNC": {
-          "STACK-SELECT-PACKAGE": "ranger-usersync",
-          "INSTALL": [
-            "ranger-usersync"
-          ],
-          "PATCH": [
-            "ranger-usersync"
-          ],
-          "STANDARD": [
-            "ranger-usersync"
-          ]
-        }
-      },
-      "RANGER_KMS": {
-        "RANGER_KMS_SERVER": {
-          "STACK-SELECT-PACKAGE": "ranger-kms",
-          "INSTALL": [
-            "ranger-kms"
-          ],
-          "PATCH": [
-            "ranger-kms"
-          ],
-          "STANDARD": [
-            "ranger-kms"
-          ]
-        }
-      },
-      "SLIDER": {
-        "SLIDER": {
-          "STACK-SELECT-PACKAGE": "slider-client",
-          "INSTALL": [
-            "slider-client"
-          ],
-          "PATCH": [
-            "slider-client"
-          ],
-          "STANDARD": [
-            "slider-client",
-            "hadoop-client"
-          ]
-        }
-      },
-      "SPARK": {
-        "LIVY_SERVER": {
-          "STACK-SELECT-PACKAGE": "livy2-server",
-          "INSTALL": [
-            "livy2-server"
-          ],
-          "PATCH": [
-            "livy2-server"
-          ],
-          "STANDARD": [
-            "livy2-server"
-          ]
-        },
-        "SPARK_CLIENT": {
-          "STACK-SELECT-PACKAGE": "spark2-client",
-          "INSTALL": [
-            "spark2-client"
-          ],
-          "PATCH": [
-            "spark2-client"
-          ],
-          "STANDARD": [
-            "spark2-client"
-          ]
-        },
-        "SPARK_JOBHISTORYSERVER": {
-          "STACK-SELECT-PACKAGE": "spark2-historyserver",
-          "INSTALL": [
-            "spark2-historyserver"
-          ],
-          "PATCH": [
-            "spark2-historyserver"
-          ],
-          "STANDARD": [
-            "spark2-historyserver"
-          ]
-        },
-        "SPARK_THRIFTSERVER": {
-          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
-          "INSTALL": [
-            "spark2-thriftserver"
-          ],
-          "PATCH": [
-            "spark2-thriftserver"
-          ],
-          "STANDARD": [
-            "spark2-thriftserver"
-          ]
-        }
-      },
-      "SQOOP": {
-        "SQOOP": {
-          "STACK-SELECT-PACKAGE": "sqoop-client",
-          "INSTALL": [
-            "sqoop-client"
-          ],
-          "PATCH": [
-            "sqoop-client"
-          ],
-          "STANDARD": [
-            "sqoop-client"
-          ]
-        }
-      },
-      "STORM": {
-        "NIMBUS": {
-          "STACK-SELECT-PACKAGE": "storm-nimbus",
-          "INSTALL": [
-            "storm-client",
-            "storm-nimbus"
-          ],
-          "PATCH": [
-            "storm-client",
-            "storm-nimbus"
-          ],
-          "STANDARD": [
-            "storm-client",
-            "storm-nimbus"
-          ]
-        },
-        "SUPERVISOR": {
-          "STACK-SELECT-PACKAGE": "storm-supervisor",
-          "INSTALL": [
-            "storm-supervisor"
-          ],
-          "PATCH": [
-            "storm-supervisor"
-          ],
-          "STANDARD": [
-            "storm-client",
-            "storm-supervisor"
-          ]
-        },
-        "DRPC_SERVER": {
-          "STACK-SELECT-PACKAGE": "storm-client",
-          "INSTALL": [
-            "storm-client"
-          ],
-          "PATCH": [
-            "storm-client"
-          ],
-          "STANDARD": [
-            "storm-client"
-          ]
-        },
-        "STORM_UI_SERVER": {
-          "STACK-SELECT-PACKAGE": "storm-client",
-          "INSTALL": [
-            "storm-client"
-          ],
-          "PATCH": [
-            "storm-client"
-          ],
-          "STANDARD": [
-            "storm-client"
-          ]
-        }
-      },
-      "TEZ": {
-        "TEZ_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "YARN": {
-        "APP_TIMELINE_SERVER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
-          "INSTALL": [
-            "hadoop-yarn-timelineserver"
-          ],
-          "PATCH": [
-            "hadoop-yarn-timelineserver"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-timelineserver"
-          ]
-        },
-        "NODEMANAGER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
-          "INSTALL": [
-            "hadoop-yarn-nodemanager"
-          ],
-          "PATCH": [
-            "hadoop-yarn-nodemanager"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-nodemanager"
-          ]
-        },
-        "RESOURCEMANAGER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
-          "INSTALL": [
-            "hadoop-yarn-resourcemanager"
-          ],
-          "PATCH": [
-            "hadoop-yarn-resourcemanager"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-resourcemanager"
-          ]
-        },
-        "YARN_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "ZEPPELIN": {
-        "ZEPPELIN_MASTER": {
-          "STACK-SELECT-PACKAGE": "zeppelin-server",
-          "INSTALL": [
-            "zeppelin-server"
-          ],
-          "PATCH": [
-            "zeppelin-server"
-          ],
-          "STANDARD": [
-            "zeppelin-server"
-          ]
-        }
-      },
-      "ZOOKEEPER": {
-        "ZOOKEEPER_CLIENT": {
-          "STACK-SELECT-PACKAGE": "zookeeper-client",
-          "INSTALL": [
-            "zookeeper-client"
-          ],
-          "PATCH": [
-            "zookeeper-client"
-          ],
-          "STANDARD": [
-            "zookeeper-client"
-          ]
-        },
-        "ZOOKEEPER_SERVER": {
-          "STACK-SELECT-PACKAGE": "zookeeper-server",
-          "INSTALL": [
-            "zookeeper-server"
-          ],
-          "PATCH": [
-            "zookeeper-server"
-          ],
-          "STANDARD": [
-            "zookeeper-server"
-          ]
-        }
-      }
-    }
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py 
b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
index a488a96..de83f7e 100644
--- a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
@@ -115,10 +115,12 @@ class TestRUSetAll(RMFTestCase):
     with open(json_file_path, "r") as json_file:
       json_payload = json.load(json_file)
 
+    json_payload['hostLevelParams']['stack_name'] = "HDP"
     json_payload['hostLevelParams']['stack_version'] = "2.3"
     json_payload['commandParams']['version'] = "2.3.0.0-1234"
     json_payload["configurations"]["cluster-env"]["stack_tools"] = 
self.get_stack_tools()
     json_payload["configurations"]["cluster-env"]["stack_features"] = 
self.get_stack_features()
+    json_payload["configurations"]["cluster-env"]["stack_packages"] = 
self.get_stack_packages()
 
     config_dict = ConfigDictionary(json_payload)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index 7c5c7f5..135b239 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -244,25 +244,8 @@ class TestHBaseClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, 
''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hbase-client', version), sudo=True)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'phoenix-client', version), sudo=True)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
-
-    self.assertEquals(3, mocks_dict['call'].call_count)
-    self.assertEquals(6, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[5][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[1][0][0])
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 42289e1..a28c3f9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -776,18 +776,7 @@ class TestHBaseMaster(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, 
''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hbase-master', version), sudo=True)
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(3, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 6a2d8fb..6f27ecc 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -586,17 +586,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None), (0, None), (0, 
None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hbase-regionserver', version), sudo=True)
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(3, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 973e274..972aa61 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -439,10 +439,5 @@ class TestPhoenixQueryServer(RMFTestCase):
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
-    self.assertResourceCalled('Directory', '/etc/hbase/2.3.0.0-1234/0',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 0f31ad2..966254a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -510,22 +510,11 @@ class TestDatanode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), 
to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-hdfs-datanode', version), sudo=True,)
 
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
 
   @patch("socket.gethostbyname")
   @patch('time.sleep')

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
index 680c984..7a70578 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
@@ -107,21 +107,11 @@ class Test(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
   def test_pre_upgrade_restart(self):
     config_file = 
self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 06c5fdd..22e4827 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -412,16 +412,7 @@ class TestJournalnode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), 
to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-hdfs-journalnode', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index eb595c1..b26c8fb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1351,12 +1351,8 @@ class TestNamenode(RMFTestCase):
                        config_file = "nn_eu_standby.json",
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None), (0, None, ''), (0, None)] ,
                        mocks_dict=mocks_dict)
 
-    calls = mocks_dict['call'].call_args_list
-    self.assertTrue(len(calls) >= 1)
-    self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package 
hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
 
 
   @patch("hdfs_namenode.is_this_namenode_active")
@@ -1429,10 +1425,8 @@ class TestNamenode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', '/etc/hadoop/conf', 
to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
     self.assertNoMoreResources()
 
@@ -1665,11 +1659,8 @@ class TestNamenode(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = itertools.cycle([(0, None, None)]),
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-      to = '/usr/hdp/current/hadoop-client/conf')
 
     import sys
     self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", 
sys.modules["params"].hadoop_conf_dir)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index 773d3fe..4317c30 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -295,9 +295,7 @@ class TestNFSGateway(RMFTestCase):
                        config_dict = json_content,
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None), (0, None), (0, 
None)])
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), 
to='/usr/hdp/current/hadoop-client/conf')
+                       target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hadoop-hdfs-nfs3', version), sudo=True,)
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index 3bc597e..f7af5b9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -227,25 +227,8 @@ class TestHiveClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, 
''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), 
to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[1][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index fc6d14e..033680c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -910,10 +910,8 @@ From source with checksum 
150f554beae04f76f814f59549dead8b"""
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), 
to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute',
 
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hive-server2', version), sudo=True,)
@@ -932,12 +930,3 @@ From source with checksum 
150f554beae04f76f814f59549dead8b"""
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index a6a4fa0..943c201 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -314,7 +314,6 @@ class TestWebHCatServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertTrue("params" in sys.modules)
@@ -325,20 +324,6 @@ class TestWebHCatServer(RMFTestCase):
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hive-webhcat', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hive-hcatalog', '--stack-version', '2.3.0.0-1234', 
'--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hive-hcatalog', '--stack-version', '2.3.0.0-1234', 
'--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[1][0][0])
 
   @patch("resource_management.core.shell.call")
   def test_rolling_restart_configure(self, call_mock):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
index 31d54ae..f160029 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
@@ -270,19 +270,8 @@ class TestOozieClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/oozie/conf'), 
to='/usr/hdp/current/oozie-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'oozie-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 17b8abf..dfa22fd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -1259,7 +1259,7 @@ class TestOozieServer(RMFTestCase):
      config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
-     call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
+     call_mocks = [(0, prepare_war_stdout)],
      mocks_dict = mocks_dict)
 
     self.assertTrue(isfile_mock.called)
@@ -1270,10 +1270,6 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     
glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
-    self.assertResourceCalled('Link', '/etc/oozie/conf',
-                              to = '/usr/hdp/current/oozie-client/conf',
-    )
-
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
@@ -1284,17 +1280,6 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('File', 
'/usr/hdp/current/oozie-server/libext/ext-2.2.zip', mode = 0644)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
 
   @patch("os.path.isdir")
   @patch("os.path.exists")
@@ -1505,7 +1490,7 @@ class TestOozieServer(RMFTestCase):
      classname = "OozieServer", command = "pre_upgrade_restart", config_dict = 
json_content,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
-     call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
+     call_mocks = [(0, prepare_war_stdout)],
      mocks_dict = mocks_dict)
 
     self.assertTrue(isfile_mock.called)
@@ -1516,7 +1501,6 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     
glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
-    self.assertResourceCalled('Link', '/etc/oozie/conf', to = 
'/usr/hdp/current/oozie-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
@@ -1532,14 +1516,3 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop 
/usr/hdp/current/oozie-server/libext/falcon-oozie-el-extension-*.jar')
 
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
index 63076f9..3c4f899 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
@@ -167,24 +167,8 @@ class TestPigClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 
'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[1][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py 
b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
index 4622ae3..053d44a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
@@ -141,17 +141,6 @@ class TestSqoop(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/sqoop/conf'), 
to='/usr/hdp/current/sqoop-client/conf')
     self.assertResourceCalled("Execute", ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'sqoop-client', version), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'sqoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'sqoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index e156af2..ba3d0ab 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -791,6 +791,3 @@ class TestHistoryServer(RMFTestCase):
     )
 
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 5898355..75eff39 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -416,21 +416,11 @@ class TestMapReduce2Client(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
   def test_stack_upgrade_save_new_config(self):
     config_file = 
self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json"
     with open(config_file, "r") as f:

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 4281696..642043d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -663,17 +663,7 @@ class TestNodeManager(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index 652fea8..8e92116 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -552,17 +552,7 @@ class TestResourceManager(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-yarn-resourcemanager', version), 
sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 09a6278..375028a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -583,17 +583,7 @@ class TestYarnClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', 
'--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', 
'0'),
-       mocks_dict['call'].call_args_list[0][0][0])

Reply via email to