Hello community,

here is the log from the commit of package azure-cli-dla for openSUSE:Factory 
checked in at 2018-02-14 09:30:41
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/azure-cli-dla (Old)
 and      /work/SRC/openSUSE:Factory/.azure-cli-dla.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "azure-cli-dla"

Wed Feb 14 09:30:41 2018 rev:2 rq:574807 version:0.0.18

Changes:
--------
--- /work/SRC/openSUSE:Factory/azure-cli-dla/azure-cli-dla.changes      
2017-11-10 14:53:33.884395299 +0100
+++ /work/SRC/openSUSE:Factory/.azure-cli-dla.new/azure-cli-dla.changes 
2018-02-14 09:30:42.805455419 +0100
@@ -1,0 +2,10 @@
+Wed Feb  7 15:58:22 UTC 2018 - adrian.glaubitz@suse.com
+
+- New upstream release
+  + Version 0.0.18
+  + For detailed information about changes see the
+    HISTORY.rst file provided with this package
+- Install HISTORY.rst into doc directory
+- Update Requires from setup.py
+
+-------------------------------------------------------------------

Old:
----
  azure-cli-dla-0.0.12.tar.gz

New:
----
  azure-cli-dla-0.0.18.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ azure-cli-dla.spec ++++++
--- /var/tmp/diff_new_pack.pPHNz8/_old  2018-02-14 09:30:44.033410922 +0100
+++ /var/tmp/diff_new_pack.pPHNz8/_new  2018-02-14 09:30:44.037410778 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package azure-cli-dla
 #
-# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -15,8 +15,9 @@
 # Please submit bugfixes or comments via http://bugs.opensuse.org/
 #
 
+
 Name:           azure-cli-dla
-Version:        0.0.12
+Version:        0.0.18
 Release:        0
 Summary:        Microsoft Azure CLI 'data lake analytics' Command Module
 License:        MIT
@@ -24,18 +25,18 @@
 Url:            https://github.com/Azure/azure-cli
 Source:         
https://files.pythonhosted.org/packages/source/a/azure-cli-dla/azure-cli-dla-%{version}.tar.gz
 Source1:        LICENSE.txt
-BuildRequires:  python3-devel
-BuildRequires:  python3-setuptools
-BuildRequires:  unzip
 BuildRequires:  azure-cli-command-modules-nspkg
 BuildRequires:  azure-cli-nspkg
 BuildRequires:  python3-azure-nspkg
+BuildRequires:  python3-devel
+BuildRequires:  python3-setuptools
+BuildRequires:  unzip
 Requires:       azure-cli-command-modules-nspkg
+Requires:       azure-cli-core
 Requires:       azure-cli-nspkg
+Requires:       python3-azure-mgmt-datalake-analytics >= 0.2.0
+Requires:       python3-azure-mgmt-datalake-store >= 0.2.0
 Requires:       python3-azure-nspkg
-Requires:       python3-azure-mgmt-datalake-store >= 0.1.6
-Requires:       python3-azure-mgmt-datalake-analytics >= 0.1.6
-Requires:       azure-cli-core
 Conflicts:      azure-cli < 2.0.0
 
 BuildArch:      noarch
@@ -64,7 +65,8 @@
 
 %files
 %defattr(-,root,root,-)
-%doc LICENSE.txt README.rst
+%doc HISTORY.rst LICENSE.txt README.rst
 %{python3_sitelib}/azure/cli/command_modules/dla
 %{python3_sitelib}/azure_cli_dla-*.egg-info
+
 %changelog

++++++ azure-cli-dla-0.0.12.tar.gz -> azure-cli-dla-0.0.18.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-cli-dla-0.0.12/HISTORY.rst 
new/azure-cli-dla-0.0.18/HISTORY.rst
--- old/azure-cli-dla-0.0.12/HISTORY.rst        2017-09-23 01:47:00.000000000 
+0200
+++ new/azure-cli-dla-0.0.18/HISTORY.rst        2018-01-26 17:12:23.000000000 
+0100
@@ -2,6 +2,33 @@
 
 Release History
 ===============
+
+0.0.18
+++++++
+* Performance fixes.
+
+0.0.17
+++++++
+* Update helpfile
+  
+0.0.16
+++++++
+* Update for CLI core changes.
+
+0.0.15
+++++++
+* Change the return type of the job list command: a list of JobInformation to 
a list of JobInformationBasic
+* Change the return type of the account list command: a list of 
DataLakeAnalyticsAccount to a list of DataLakeAnalyticsAccountBasic
+* The properties of a Basic type is a strict subset of the properties of a 
regular type
+
+0.0.14
+++++++
+* Minor fixes.
+
+0.0.13
+++++++
+* minor fixes
+
 0.0.12 (2017-09-22)
 +++++++++++++++++++
 * minor fixes
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-cli-dla-0.0.12/PKG-INFO 
new/azure-cli-dla-0.0.18/PKG-INFO
--- old/azure-cli-dla-0.0.12/PKG-INFO   2017-09-23 01:48:50.000000000 +0200
+++ new/azure-cli-dla-0.0.18/PKG-INFO   2018-01-26 17:12:45.000000000 +0100
@@ -1,12 +1,11 @@
 Metadata-Version: 1.1
 Name: azure-cli-dla
-Version: 0.0.12
+Version: 0.0.18
 Summary: Microsoft Azure Command-Line Tools Data Lake Analytics Command Module
 Home-page: https://github.com/Azure/azure-cli
 Author: Microsoft Corporation
 Author-email: azpycli@microsoft.com
 License: MIT
-Description-Content-Type: UNKNOWN
 Description: Microsoft Azure CLI 'data lake analytics' Command Module
         ========================================================
         
@@ -20,6 +19,33 @@
         
         Release History
         ===============
+        
+        0.0.18
+        ++++++
+        * Performance fixes.
+        
+        0.0.17
+        ++++++
+        * Update helpfile
+          
+        0.0.16
+        ++++++
+        * Update for CLI core changes.
+        
+        0.0.15
+        ++++++
+        * Change the return type of the job list command: a list of 
JobInformation to a list of JobInformationBasic
+        * Change the return type of the account list command: a list of 
DataLakeAnalyticsAccount to a list of DataLakeAnalyticsAccountBasic
+        * The properties of a Basic type is a strict subset of the properties 
of a regular type
+        
+        0.0.14
+        ++++++
+        * Minor fixes.
+        
+        0.0.13
+        ++++++
+        * minor fixes
+        
         0.0.12 (2017-09-22)
         +++++++++++++++++++
         * minor fixes
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/__init__.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/__init__.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/__init__.py  
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/__init__.py  
2018-01-26 17:12:23.000000000 +0100
@@ -3,12 +3,29 @@
 # Licensed under the MIT License. See License.txt in the project root for 
license information.
 # 
--------------------------------------------------------------------------------------------
 # pylint: disable=unused-import
-import azure.cli.command_modules.dla._help
 
+from azure.cli.core import AzCommandsLoader
 
-def load_params(_):
-    import azure.cli.command_modules.dla._params  # pylint: 
disable=redefined-outer-name, unused-variable
+from azure.cli.command_modules.dla._help import helps  # pylint: 
disable=unused-import
 
 
-def load_commands():
-    import azure.cli.command_modules.dla.commands  # pylint: 
disable=redefined-outer-name, unused-variable
+class DataLakeAnalyticsCommandsLoader(AzCommandsLoader):
+
+    def __init__(self, cli_ctx=None):
+        from azure.cli.core.commands import CliCommandType
+        dla_custom = 
CliCommandType(operations_tmpl='azure.cli.command_modules.dla.custom#{}')
+        super(DataLakeAnalyticsCommandsLoader, self).__init__(cli_ctx=cli_ctx,
+                                                              
min_profile='2017-03-10-profile',
+                                                              
custom_command_type=dla_custom)
+
+    def load_command_table(self, args):
+        from azure.cli.command_modules.dla.commands import load_command_table
+        load_command_table(self, args)
+        return self.command_table
+
+    def load_arguments(self, command):
+        from azure.cli.command_modules.dla._params import load_arguments
+        load_arguments(self, command)
+
+
+COMMAND_LOADER_CLS = DataLakeAnalyticsCommandsLoader
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_client_factory.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_client_factory.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_client_factory.py   
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_client_factory.py   
2018-01-26 17:12:23.000000000 +0100
@@ -2,78 +2,81 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for 
license information.
 # 
--------------------------------------------------------------------------------------------
-from azure.cli.core._profile import CLOUD
 
 
-def cf_dla_account(_):
+def cf_dla_account(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-    return 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).account
+    return get_mgmt_service_client(cli_ctx, 
DataLakeAnalyticsAccountManagementClient).account
 
 
-def cf_dla_account_firewall(_):
+def cf_dla_account_firewall(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-    return 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).firewall_rules
+    return get_mgmt_service_client(cli_ctx, 
DataLakeAnalyticsAccountManagementClient).firewall_rules
 
 
-def cf_dla_account_compute_policy(_):
+def cf_dla_account_compute_policy(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-    return 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).compute_policies
+    return get_mgmt_service_client(cli_ctx, 
DataLakeAnalyticsAccountManagementClient).compute_policies
 
 
-def cf_dla_account_storage(_):
+def cf_dla_account_storage(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-    return 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).storage_accounts
+    return get_mgmt_service_client(cli_ctx, 
DataLakeAnalyticsAccountManagementClient).storage_accounts
 
 
-def cf_dla_account_adls(_):
+def cf_dla_account_adls(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-    return 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).data_lake_store_accounts
+    return get_mgmt_service_client(cli_ctx, 
DataLakeAnalyticsAccountManagementClient).data_lake_store_accounts
 
 
-def cf_dla_catalog(_):
+def cf_dla_catalog(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.catalog import 
DataLakeAnalyticsCatalogManagementClient
     return get_mgmt_service_client(
+        cli_ctx,
         DataLakeAnalyticsCatalogManagementClient,
         subscription_bound=False,
         base_url_bound=False,
-        resource=CLOUD.endpoints.active_directory_data_lake_resource_id,
-        
adla_catalog_dns_suffix=CLOUD.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).catalog
+        
resource=cli_ctx.cloud.endpoints.active_directory_data_lake_resource_id,
+        
adla_catalog_dns_suffix=cli_ctx.cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).catalog
 
 
-def cf_dla_job(_):
+def cf_dla_job(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.job import 
DataLakeAnalyticsJobManagementClient
     return get_mgmt_service_client(
+        cli_ctx,
         DataLakeAnalyticsJobManagementClient,
         subscription_bound=False,
         base_url_bound=False,
-        resource=CLOUD.endpoints.active_directory_data_lake_resource_id,
-        
adla_job_dns_suffix=CLOUD.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).job
+        
resource=cli_ctx.cloud.endpoints.active_directory_data_lake_resource_id,
+        
adla_job_dns_suffix=cli_ctx.cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).job
 
 
-def cf_dla_job_recurrence(_):
+def cf_dla_job_recurrence(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.job import 
DataLakeAnalyticsJobManagementClient
     return get_mgmt_service_client(
+        cli_ctx,
         DataLakeAnalyticsJobManagementClient,
         subscription_bound=False,
         base_url_bound=False,
-        resource=CLOUD.endpoints.active_directory_data_lake_resource_id,
-        
adla_job_dns_suffix=CLOUD.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).recurrence
+        
resource=cli_ctx.cloud.endpoints.active_directory_data_lake_resource_id,
+        
adla_job_dns_suffix=cli_ctx.cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).recurrence
 
 
-def cf_dla_job_pipeline(_):
+def cf_dla_job_pipeline(cli_ctx, _):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
     from azure.mgmt.datalake.analytics.job import 
DataLakeAnalyticsJobManagementClient
     return get_mgmt_service_client(
+        cli_ctx,
         DataLakeAnalyticsJobManagementClient,
         subscription_bound=False,
         base_url_bound=False,
-        resource=CLOUD.endpoints.active_directory_data_lake_resource_id,
-        
adla_job_dns_suffix=CLOUD.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).pipeline
+        
resource=cli_ctx.cloud.endpoints.active_directory_data_lake_resource_id,
+        
adla_job_dns_suffix=cli_ctx.cloud.suffixes.azure_datalake_analytics_catalog_and_job_endpoint).pipeline
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_help.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_help.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_help.py     
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_help.py     
2018-01-26 17:12:23.000000000 +0100
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for 
license information.
 # 
--------------------------------------------------------------------------------------------
 
-from azure.cli.core.help_files import helps
+from knack.help_files import helps
 
 
 helps['dla'] = """
@@ -25,7 +25,7 @@
           short-summary: Name for the submitted job.
         - name: --script
           type: string
-          short-summary: Script to submit. This may be an @{file} to load from 
a file.
+          short-summary: Script to submit. This may be '@{file}' to load from 
a file.
         - name: --runtime-version
           short-summary: The runtime version to use.
           long-summary: This parameter is used for explicitly overwriting the 
default runtime. It should only be done if you know what you are doing.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_params.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_params.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_params.py   
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_params.py   
2018-01-26 17:12:23.000000000 +0100
@@ -3,84 +3,94 @@
 # Licensed under the MIT License. See License.txt in the project root for 
license information.
 # 
--------------------------------------------------------------------------------------------
 from argcomplete.completers import FilesCompleter
-from azure.cli.core.commands import \
-    (register_cli_argument, CliArgumentType)
-from azure.cli.core.commands.parameters import \
-    (tags_type,
-     get_resource_name_completion_list,
-     resource_group_name_type,
-     enum_choice_list)
-
-from azure.cli.command_modules.dla._validators import 
(validate_resource_group_name,
-                                                       datetime_format,
-                                                       
process_dla_job_submit_namespace)
-
-
-from 
azure.mgmt.datalake.analytics.account.models.data_lake_analytics_account_management_client_enums
 \
-    import (FirewallState,
-            TierType,
-            FirewallAllowAzureIpsState,
-            AADObjectType)
-
-from 
azure.mgmt.datalake.analytics.job.models.data_lake_analytics_job_management_client_enums
 \
-    import (CompileMode,
-            JobState,
-            JobResult)
-
-
-# ARGUMENT DEFINITIONS
-# pylint: disable=line-too-long
-datalake_analytics_name_type = CliArgumentType(help='Name of the Data Lake 
Analytics account.', options_list=('--account_name',), 
completer=get_resource_name_completion_list('Microsoft.DataLakeAnalytics/accounts'),
 id_part='name')
-
-# PARAMETER REGISTRATIONS
-# data lake analytics common params
-register_cli_argument('dla', 'resource_group_name', resource_group_name_type, 
id_part=None, required=False, help='If not specified, will attempt to discover 
the resource group for the specified Data Lake Analytics account.', 
validator=validate_resource_group_name)
-register_cli_argument('dla', 'top', help='Maximum number of items to return.', 
type=int)
-register_cli_argument('dla', 'skip', help='The number of items to skip over 
before returning elements.', type=int)
-register_cli_argument('dla', 'count', help='The Boolean value of true or false 
to request a count of the matching resources included with the resources in the 
response, e.g. Categories?$count=true.', type=bool)
-
-# account params
-register_cli_argument('dla', 'account_name', datalake_analytics_name_type, 
options_list=('--account', '-n'))
-register_cli_argument('dla account', 'tags', tags_type)
-register_cli_argument('dla account', 'tier', help='The desired commitment tier 
for this account to use.', **enum_choice_list(TierType))
-register_cli_argument('dla account create', 'resource_group_name', 
resource_group_name_type, validator=None)
-register_cli_argument('dla account create', 'account_name', 
datalake_analytics_name_type, options_list=('--account', '-n'), completer=None)
-register_cli_argument('dla account update', 'firewall_state', 
help='Enable/disable existing firewall rules.', 
**enum_choice_list(FirewallState))
-register_cli_argument('dla account update', 'allow_azure_ips', 
help='Allow/block Azure originating IPs through the firewall', 
**enum_choice_list(FirewallAllowAzureIpsState))
-register_cli_argument('dla account update', 'max_job_count', help='The maximum 
supported jobs running under the account at the same time.', type=int)
-register_cli_argument('dla account update', 'max_degree_of_parallelism', 
help='The maximum supported degree of parallelism for this account.', type=int)
-register_cli_argument('dla account update', 'query_store_retention', help='The 
number of days that job metadata is retained.', type=int)
-register_cli_argument('dla account list', 'resource_group_name', 
resource_group_name_type, validator=None)
-
-# storage parameters
-register_cli_argument('dla account blob-storage', 'access_key', help='the 
access key associated with this Azure Storage account that will be used to 
connect to it')
-register_cli_argument('dla account blob-storage', 'suffix', help='the optional 
suffix for the storage account')
-
-# Job params
-# pylint: disable=line-too-long
-register_cli_argument('dla job submit', 'compile_mode', help='Indicates the 
type of compilation to be done on this job. Valid values are: \'Semantic\' 
(Only performs semantic checks and necessary sanity checks), \'Full\' (full 
compilation) and \'SingleBox\' (Full compilation performed locally)', 
**enum_choice_list(CompileMode))
-register_cli_argument('dla job submit', 'compile_only', help='Indicates that 
the submission should only build the job and not execute if set to true.', 
action='store_true')
-register_cli_argument('dla job submit', 'script', completer=FilesCompleter(), 
help="The script to submit. This is either the script contents or use `@<file 
path>` to load the script from a file", 
validator=process_dla_job_submit_namespace)
-register_cli_argument('dla job wait', 'max_wait_time_sec', help='The maximum 
amount of time to wait before erroring out. Default value is to never timeout. 
Any value <= 0 means never timeout', type=int)
-register_cli_argument('dla job wait', 'wait_interval_sec', help='The polling 
interval between checks for the job status, in seconds.', type=int)
-register_cli_argument('dla job list', 'submitted_after', help='A filter which 
returns jobs only submitted after the specified time, in ISO-8601 format.', 
type=datetime_format)
-register_cli_argument('dla job list', 'submitted_before', help='A filter which 
returns jobs only submitted before the specified time, in ISO-8601 format.', 
type=datetime_format)
-register_cli_argument('dla job list', 'state', help='A filter which returns 
jobs with only the specified state(s).', nargs='*', 
**enum_choice_list(JobState))
-register_cli_argument('dla job list', 'result', help='A filter which returns 
jobs with only the specified result(s).', nargs='*', 
**enum_choice_list(JobResult))
-register_cli_argument('dla job list', 'submitter', help='A filter which 
returns jobs only by the specified submitter.')
-register_cli_argument('dla job list', 'name', help='A filter which returns 
jobs only by the specified friendly name.')
-register_cli_argument('dla job list', 'pipeline_id', help='A filter which 
returns jobs only containing the specified pipeline_id.')
-register_cli_argument('dla job list', 'recurrence_id', help='A filter which 
returns jobs only containing the specified recurrence_id.')
-
-# credential params
-register_cli_argument('dla catalog credential create', 
'credential_user_password', options_list=('--password', '-p'), help='Password 
for the credential user. Will prompt if not given.')
-register_cli_argument('dla catalog credential create', 'credential_user_name', 
options_list=('--user-name',))
-register_cli_argument('dla catalog credential update', 'credential_user_name', 
options_list=('--user-name',))
-register_cli_argument('dla catalog credential update', 
'credential_user_password', options_list=('--password', '-p'), help='Current 
password for the credential user. Will prompt if not given.')
-register_cli_argument('dla catalog credential update', 
'new_credential_user_password', options_list=('--new-password',), help='New 
password for the credential user. Will prompt if not given.')
-
-# compute policy params
-register_cli_argument('dla account compute_policy', 'max_dop_per_job', 
help='The maximum degree of parallelism allowed per job for this policy. At 
least one of --min-priority-per-job and --max-dop-per-job must be specified.', 
type=int)
-register_cli_argument('dla account compute_policy', 'min_priority_per_job', 
help='The minimum priority allowed per job for this policy. At least one of 
--min-priority-per-job and --max-dop-per-job must be specified.', type=int)
-register_cli_argument('dla account compute_policy create', 'object_id', 
help='The Azure Active Directory object ID of the user, group or service 
principal to apply the policy to.')
-register_cli_argument('dla account compute_policy create', 'object_type', 
help='The Azure Active Directory object type associated with the supplied 
object id.', **enum_choice_list(AADObjectType))
+
+from knack.arguments import CLIArgumentType
+
+from azure.cli.core.commands.parameters import (
+    tags_type, get_resource_name_completion_list, resource_group_name_type, 
get_enum_type)
+
+from azure.cli.command_modules.dla._validators import 
validate_resource_group_name, datetime_format
+
+
+# pylint: disable=line-too-long, too-many-statements
+def load_arguments(self, _):
+
+    from 
azure.mgmt.datalake.analytics.account.models.data_lake_analytics_account_management_client_enums
 import (
+        FirewallState, TierType, FirewallAllowAzureIpsState, AADObjectType)
+
+    from 
azure.mgmt.datalake.analytics.job.models.data_lake_analytics_job_management_client_enums
 import (
+        CompileMode, JobState, JobResult)
+
+    datalake_analytics_name_type = CLIArgumentType(help='Name of the Data Lake 
Analytics account.', options_list=('--account_name',), 
completer=get_resource_name_completion_list('Microsoft.DataLakeAnalytics/accounts'),
 id_part='name')
+
+    # PARAMETER REGISTRATIONS
+    # common
+    with self.argument_context('dla') as c:
+        c.argument('resource_group_name', resource_group_name_type, 
id_part=None, required=False, help='If not specified, will attempt to discover 
the resource group for the specified Data Lake Analytics account.', 
validator=validate_resource_group_name)
+        c.argument('top', help='Maximum number of items to return.', type=int)
+        c.argument('skip', help='The number of items to skip over before 
returning elements.', type=int)
+        c.argument('count', help='The Boolean value of true or false to 
request a count of the matching resources included with the resources in the 
response, e.g. Categories?$count=true.', type=bool)
+        c.argument('account_name', datalake_analytics_name_type, 
options_list=['--account', '-n'])
+
+    # account
+    with self.argument_context('dla account') as c:
+        c.argument('tags', tags_type)
+        c.argument('tier', arg_type=get_enum_type(TierType), help='The desired 
commitment tier for this account to use.')
+
+    with self.argument_context('dla account create') as c:
+        c.argument('resource_group_name', resource_group_name_type, 
validator=None)
+        c.argument('account_name', datalake_analytics_name_type, 
options_list=('--account', '-n'), completer=None)
+
+    with self.argument_context('dla account update') as c:
+        c.argument('firewall_state', help='Enable/disable existing firewall 
rules.', arg_type=get_enum_type(FirewallState))
+        c.argument('allow_azure_ips', help='Allow/block Azure originating IPs 
through the firewall', arg_type=get_enum_type(FirewallAllowAzureIpsState))
+        c.argument('max_job_count', help='The maximum supported jobs running 
under the account at the same time.', type=int)
+        c.argument('max_degree_of_parallelism', help='The maximum supported 
degree of parallelism for this account.', type=int)
+        c.argument('query_store_retention', help='The number of days that job 
metadata is retained.', type=int)
+
+    with self.argument_context('dla account list') as c:
+        c.argument('resource_group_name', resource_group_name_type, 
validator=None)
+
+    # storage
+    with self.argument_context('dla account blob-storage') as c:
+        c.argument('access_key', help='the access key associated with this 
Azure Storage account that will be used to connect to it')
+        c.argument('suffix', help='the optional suffix for the storage 
account')
+
+    # job
+    with self.argument_context('dla job submit') as c:
+        c.argument('compile_mode', arg_type=get_enum_type(CompileMode), 
help='Indicates the type of compilation to be done on this job. Valid values 
are: \'Semantic\' (Only performs semantic checks and necessary sanity checks), 
\'Full\' (full compilation) and \'SingleBox\' (Full compilation performed 
locally)')
+        c.argument('compile_only', help='Indicates that the submission should 
only build the job and not execute if set to true.', action='store_true')
+        c.argument('script', completer=FilesCompleter(), help="The script to 
submit. This is either the script contents or use `@<file path>` to load the 
script from a file")
+
+    with self.argument_context('dla job wait') as c:
+        c.argument('max_wait_time_sec', help='The maximum amount of time to 
wait before erroring out. Default value is to never timeout. Any value <= 0 
means never timeout', type=int)
+        c.argument('wait_interval_sec', help='The polling interval between 
checks for the job status, in seconds.', type=int)
+
+    with self.argument_context('dla job list') as c:
+        c.argument('submitted_after', help='A filter which returns jobs only 
submitted after the specified time, in ISO-8601 format.', type=datetime_format)
+        c.argument('submitted_before', help='A filter which returns jobs only 
submitted before the specified time, in ISO-8601 format.', type=datetime_format)
+        c.argument('state', arg_type=get_enum_type(JobState), help='A filter 
which returns jobs with only the specified state(s).', nargs='*')
+        c.argument('result', arg_type=get_enum_type(JobResult), help='A filter 
which returns jobs with only the specified result(s).', nargs='*')
+        c.argument('submitter', help='A filter which returns jobs only by the 
specified submitter.')
+        c.argument('name', help='A filter which returns jobs only by the 
specified friendly name.')
+        c.argument('pipeline_id', help='A filter which returns jobs only 
containing the specified pipeline_id.')
+        c.argument('recurrence_id', help='A filter which returns jobs only 
containing the specified recurrence_id.')
+
+    # credential
+    with self.argument_context('dla catalog credential create') as c:
+        c.argument('credential_user_password', options_list=['--password', 
'-p'], help='Password for the credential user. Will prompt if not given.')
+        c.argument('credential_user_name', options_list=['--user-name'])
+
+    with self.argument_context('dla catalog credential update') as c:
+        c.argument('credential_user_name', options_list=['--user-name'])
+        c.argument('credential_user_password', options_list=['--password', 
'-p'], help='Current password for the credential user. Will prompt if not 
given.')
+        c.argument('new_credential_user_password', 
options_list=['--new-password'], help='New password for the credential user. 
Will prompt if not given.')
+
+    # compute policy
+    with self.argument_context('dla account compute_policy') as c:
+        c.argument('max_dop_per_job', help='The maximum degree of parallelism 
allowed per job for this policy. At least one of --min-priority-per-job and 
--max-dop-per-job must be specified.', type=int)
+        c.argument('min_priority_per_job', help='The minimum priority allowed 
per job for this policy. At least one of --min-priority-per-job and 
--max-dop-per-job must be specified.', type=int)
+
+    with self.argument_context('dla account compute_policy create') as c:
+        c.argument('object_id', help='The Azure Active Directory object ID of 
the user, group or service principal to apply the policy to.')
+        c.argument('object_type', arg_type=get_enum_type(AADObjectType), 
help='The Azure Active Directory object type associated with the supplied 
object id.')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_validators.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_validators.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/_validators.py       
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/_validators.py       
2018-01-26 17:12:23.000000000 +0100
@@ -2,13 +2,10 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for 
license information.
 # 
--------------------------------------------------------------------------------------------
-from msrest.serialization import Deserializer
-from msrest.exceptions import DeserializationError
+
+from knack.util import CLIError
 
 from azure.cli.core.commands.client_factory import get_mgmt_service_client
-from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
-from azure.cli.core.commands.arm import parse_resource_id
-from azure.cli.core.util import CLIError
 
 
 # Helpers
@@ -19,6 +16,7 @@
     :return: resource group name or None
     :rtype: str
     """
+    from msrestazure.tools import parse_resource_id
     for acct in client.list():
         id_comps = parse_resource_id(acct.id)
         if id_comps['name'] == account_name:
@@ -29,16 +27,19 @@
 
 
 # COMMAND NAMESPACE VALIDATORS
-def validate_resource_group_name(ns):
+def validate_resource_group_name(cmd, ns):
+    from azure.mgmt.datalake.analytics.account import 
DataLakeAnalyticsAccountManagementClient
     if not ns.resource_group_name:
         account_name = ns.account_name
-        client = 
get_mgmt_service_client(DataLakeAnalyticsAccountManagementClient).account
+        client = get_mgmt_service_client(cmd.cli_ctx, 
DataLakeAnalyticsAccountManagementClient).account
         group_name = _get_resource_group_from_account_name(client, 
account_name)
         ns.resource_group_name = group_name
 
 
 def datetime_format(value):
     """Validate the correct format of a datetime string and deserialize."""
+    from msrest.serialization import Deserializer
+    from msrest.exceptions import DeserializationError
     try:
         datetime_obj = Deserializer.deserialize_iso(value)
     except DeserializationError:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/commands.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/commands.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/commands.py  
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/commands.py  
2018-01-26 17:12:23.000000000 +0100
@@ -4,121 +4,185 @@
 # 
--------------------------------------------------------------------------------------------
 
 # pylint: disable=line-too-long
-from azure.cli.core.commands import cli_command
-from azure.cli.core.profiles import supported_api_version, PROFILE_TYPE
-from azure.cli.command_modules.dla._client_factory import (cf_dla_account,
-                                                           
cf_dla_account_firewall,
-                                                           cf_dla_account_adls,
-                                                           
cf_dla_account_storage,
-                                                           cf_dla_job,
-                                                           cf_dla_catalog,
-                                                           cf_dla_job_pipeline,
-                                                           
cf_dla_job_recurrence,
-                                                           
cf_dla_account_compute_policy)
-
-if not supported_api_version(PROFILE_TYPE, max_api='2017-03-09-profile'):
-    adla_format_path = 'azure.mgmt.datalake.analytics.{}.operations.{}#{}.{}'
-    adla_custom_format_path = 'azure.cli.command_modules.dla.custom#{}'
+from azure.cli.core.commands import CliCommandType
+from azure.cli.command_modules.dla._client_factory import (
+    cf_dla_account,
+    cf_dla_account_firewall,
+    cf_dla_account_adls,
+    cf_dla_account_storage,
+    cf_dla_job,
+    cf_dla_catalog,
+    cf_dla_job_pipeline,
+    cf_dla_job_recurrence,
+    cf_dla_account_compute_policy)
+from azure.cli.command_modules.dla._validators import 
process_dla_job_submit_namespace
+
+
+# pylint: disable=too-many-statements
+def load_command_table(self, _):
+
+    adla_format_path = 'azure.mgmt.datalake.analytics.{}.operations.{}#{}.{{}}'
+
+    dla_account_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('account', 
'account_operations', 'AccountOperations'),
+        client_factory=cf_dla_account)
+
+    dla_firewall_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('account', 
'firewall_rules_operations', 'FirewallRulesOperations'),
+        client_factory=cf_dla_account_firewall)
+
+    dla_job_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('job', 'job_operations', 
'JobOperations'),
+        client_factory=cf_dla_job)
+
+    dla_job_pipeline_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('job', 'pipeline_operations', 
'PipelineOperations'),
+        client_factory=cf_dla_job_pipeline)
+
+    dla_job_recurrence_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('job', 
'recurrence_operations', 'RecurrenceOperations'),
+        client_factory=cf_dla_job_recurrence)
+
+    dla_storage_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('account', 
'storage_accounts_operations', 'StorageAccountsOperations'),
+        client_factory=cf_dla_account_storage)
+
+    dla_dls_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('account', 
'data_lake_store_accounts_operations', 'DataLakeStoreAccountsOperations'),
+        client_factory=cf_dla_account_adls
+    )
+
+    dla_catalog_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('catalog', 
'catalog_operations', 'CatalogOperations'),
+        client_factory=cf_dla_catalog
+    )
+
+    dla_compute_policy_sdk = CliCommandType(
+        operations_tmpl=adla_format_path.format('account', 
'compute_policies_operations', 'ComputePoliciesOperations'),
+        client_factory=cf_dla_account_compute_policy
+    )
 
     # account operations
-    cli_command(__name__, 'dla account create', 
adla_custom_format_path.format('create_adla_account'), cf_dla_account)
-    cli_command(__name__, 'dla account update', 
adla_custom_format_path.format('update_adla_account'), cf_dla_account)
-    cli_command(__name__, 'dla account list', 
adla_custom_format_path.format('list_adla_account'), cf_dla_account)
-    cli_command(__name__, 'dla account show', 
adla_format_path.format('account', 'account_operations', 'AccountOperations', 
'get'), cf_dla_account)
-    cli_command(__name__, 'dla account delete', 
adla_format_path.format('account', 'account_operations', 'AccountOperations', 
'delete'), cf_dla_account)
+    with self.command_group('dla account', dla_account_sdk, 
client_factory=cf_dla_account) as g:
+        g.custom_command('create', 'create_adla_account')
+        g.custom_command('update', 'update_adla_account')
+        g.custom_command('list', 'list_adla_account')
+        g.command('show', 'get')
+        g.command('delete', 'delete')
 
     # account fire wall operations
-    cli_command(__name__, 'dla account firewall create', 
adla_custom_format_path.format('add_adla_firewall_rule'), 
cf_dla_account_firewall)
-    cli_command(__name__, 'dla account firewall update', 
adla_format_path.format('account', 'firewall_rules_operations', 
'FirewallRulesOperations', 'update'), cf_dla_account_firewall)
-    cli_command(__name__, 'dla account firewall list', 
adla_format_path.format('account', 'firewall_rules_operations', 
'FirewallRulesOperations', 'list_by_account'), cf_dla_account_firewall)
-    cli_command(__name__, 'dla account firewall show', 
adla_format_path.format('account', 'firewall_rules_operations', 
'FirewallRulesOperations', 'get'), cf_dla_account_firewall)
-    cli_command(__name__, 'dla account firewall delete', 
adla_format_path.format('account', 'firewall_rules_operations', 
'FirewallRulesOperations', 'delete'), cf_dla_account_firewall)
+    with self.command_group('dla account firewall', dla_firewall_sdk, 
client_factory=cf_dla_account_firewall) as g:
+        g.custom_command('create', 'add_adla_firewall_rule')
+        g.command('update', 'update')
+        g.command('list', 'list_by_account')
+        g.command('show', 'get')
+        g.command('delete', 'delete')
 
     # job operations
     # todo: update to allow for inclusion of statistics/debug data in show
-    cli_command(__name__, 'dla job submit', 
adla_custom_format_path.format('submit_adla_job'), cf_dla_job)
-    cli_command(__name__, 'dla job wait', 
adla_custom_format_path.format('wait_adla_job'), cf_dla_job)
-    cli_command(__name__, 'dla job show', adla_format_path.format('job', 
'job_operations', 'JobOperations', 'get'), cf_dla_job)
-    cli_command(__name__, 'dla job cancel', adla_format_path.format('job', 
'job_operations', 'JobOperations', 'cancel'), cf_dla_job)
-    cli_command(__name__, 'dla job list', 
adla_custom_format_path.format('list_adla_jobs'), cf_dla_job)
+    with self.command_group('dla job', dla_job_sdk, client_factory=cf_dla_job) 
as g:
+        g.custom_command('submit', 'submit_adla_job', 
validator=process_dla_job_submit_namespace)
+        g.custom_command('wait', 'wait_adla_job')
+        g.command('show', 'get')
+        g.command('cancel', 'cancel')
+        g.custom_command('list', 'list_adla_jobs')
 
     # job relationship operations
-    cli_command(__name__, 'dla job pipeline show', 
adla_format_path.format('job', 'pipeline_operations', 'PipelineOperations', 
'get'), cf_dla_job_pipeline)
-    cli_command(__name__, 'dla job pipeline list', 
adla_format_path.format('job', 'pipeline_operations', 'PipelineOperations', 
'list'), cf_dla_job_pipeline)
-    cli_command(__name__, 'dla job recurrence show', 
adla_format_path.format('job', 'recurrence_operations', 'RecurrenceOperations', 
'get'), cf_dla_job_recurrence)
-    cli_command(__name__, 'dla job recurrence list', 
adla_format_path.format('job', 'recurrence_operations', 'RecurrenceOperations', 
'list'), cf_dla_job_recurrence)
+    with self.command_group('dla job pipeline', dla_job_pipeline_sdk) as g:
+        g.command('show', 'get')
+        g.command('list', 'list')
+
+    with self.command_group('dla job recurrence', dla_job_recurrence_sdk) as g:
+        g.command('show', 'get')
+        g.command('list', 'list')
 
     # account data source operations
-    cli_command(__name__, 'dla account blob-storage show', 
adla_format_path.format('account', 'storage_accounts_operations', 
'StorageAccountsOperations', 'get'), cf_dla_account_storage)
-    cli_command(__name__, 'dla account blob-storage add', 
adla_custom_format_path.format('add_adla_blob_storage'), cf_dla_account_storage)
-    cli_command(__name__, 'dla account blob-storage update', 
adla_custom_format_path.format('update_adla_blob_storage'), 
cf_dla_account_storage)
-    cli_command(__name__, 'dla account blob-storage delete', 
adla_format_path.format('account', 'storage_accounts_operations', 
'StorageAccountsOperations', 'delete'), cf_dla_account_storage)
-    cli_command(__name__, 'dla account blob-storage list', 
adla_format_path.format('account', 'storage_accounts_operations', 
'StorageAccountsOperations', 'list_by_account'), cf_dla_account_storage)
-
-    cli_command(__name__, 'dla account data-lake-store show', 
adla_format_path.format('account', 'data_lake_store_accounts_operations', 
'DataLakeStoreAccountsOperations', 'get'), cf_dla_account_adls)
-    cli_command(__name__, 'dla account data-lake-store list', 
adla_format_path.format('account', 'data_lake_store_accounts_operations', 
'DataLakeStoreAccountsOperations', 'list_by_account'), cf_dla_account_adls)
-    cli_command(__name__, 'dla account data-lake-store add', 
adla_format_path.format('account', 'data_lake_store_accounts_operations', 
'DataLakeStoreAccountsOperations', 'add'), cf_dla_account_adls)
-    cli_command(__name__, 'dla account data-lake-store delete', 
adla_format_path.format('account', 'data_lake_store_accounts_operations', 
'DataLakeStoreAccountsOperations', 'delete'), cf_dla_account_adls)
+    with self.command_group('dla account blob-storage', dla_storage_sdk, 
client_factory=cf_dla_account_storage) as g:
+        g.command('show', 'get')
+        g.custom_command('add', 'add_adla_blob_storage')
+        g.custom_command('update', 'update_adla_blob_storage')
+        g.command('delete', 'delete')
+        g.command('list', 'list_by_account')
+
+    with self.command_group('dla account data-lake-store', dla_dls_sdk) as g:
+        g.command('show', 'get')
+        g.command('list', 'list_by_account')
+        g.command('add', 'add')
+        g.command('delete', 'delete')
 
     # catalog operations
     # credential
-    cli_command(__name__, 'dla catalog credential create', 
adla_custom_format_path.format('create_adla_catalog_credential'), 
cf_dla_catalog)
-    cli_command(__name__, 'dla catalog credential show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_credential'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog credential update', 
adla_custom_format_path.format('update_adla_catalog_credential'), 
cf_dla_catalog)
-    cli_command(__name__, 'dla catalog credential list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_credentials'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog credential delete', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'delete_credential'), cf_dla_catalog)
+    with self.command_group('dla catalog credential', dla_catalog_sdk, 
client_factory=cf_dla_catalog) as g:
+        g.custom_command('create', 'create_adla_catalog_credential')
+        g.command('show', 'get_credential')
+        g.custom_command('update', 'update_adla_catalog_credential')
+        g.command('list', 'list_credentials')
+        g.command('delete', 'delete_credential')
 
     # database
-    cli_command(__name__, 'dla catalog database show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_database'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog database list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_databases'), cf_dla_catalog)
+    with self.command_group('dla catalog database', dla_catalog_sdk) as g:
+        g.command('show', 'get_database')
+        g.command('list', 'list_databases')
 
     # schema
-    cli_command(__name__, 'dla catalog schema show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_schema'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog schema list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_schemas'), cf_dla_catalog)
+    with self.command_group('dla catalog schema', dla_catalog_sdk) as g:
+        g.command('show', 'get_schema')
+        g.command('list', 'list_schemas')
 
     # table
-    cli_command(__name__, 'dla catalog table show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_table'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog table list', 
adla_custom_format_path.format('list_catalog_tables'), cf_dla_catalog)
+    with self.command_group('dla catalog table', dla_catalog_sdk, 
client_factory=cf_dla_catalog) as g:
+        g.command('show', 'get_table')
+        g.custom_command('list', 'list_catalog_tables')
 
     # assembly
-    cli_command(__name__, 'dla catalog assembly show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_assembly'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog assembly list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_assemblies'), cf_dla_catalog)
+    with self.command_group('dla catalog assembly', dla_catalog_sdk) as g:
+        g.command('show', 'get_assembly')
+        g.command('list', 'list_assemblies')
 
     # external data source
-    cli_command(__name__, 'dla catalog external-data-source show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_external_data_source'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog external-data-source list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_external_data_sources'), cf_dla_catalog)
+    with self.command_group('dla catalog external-data-source', 
dla_catalog_sdk) as g:
+        g.command('show', 'get_external_data_source')
+        g.command('list', 'list_external_data_sources')
 
     # get procedure
-    cli_command(__name__, 'dla catalog procedure show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_procedure'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog procedure list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_procedures'), cf_dla_catalog)
+    with self.command_group('dla catalog procedure', dla_catalog_sdk) as g:
+        g.command('show', 'get_procedure')
+        g.command('list', 'list_procedures')
 
     # get table partition
-    cli_command(__name__, 'dla catalog table-partition show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_table_partition'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog table-partition list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_table_partitions'), cf_dla_catalog)
+    with self.command_group('dla catalog table-partition', dla_catalog_sdk) as 
g:
+        g.command('show', 'get_table_partition')
+        g.command('list', 'list_table_partitions')
 
     # get table statistics
-    cli_command(__name__, 'dla catalog table-stats show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_table_statistic'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog table-stats list', 
adla_custom_format_path.format('list_catalog_table_statistics'), cf_dla_catalog)
+    with self.command_group('dla catalog table-stats', dla_catalog_sdk, 
client_factory=cf_dla_catalog) as g:
+        g.command('show', 'get_table_statistic')
+        g.custom_command('list', 'list_catalog_table_statistics')
 
     # get table types
-    cli_command(__name__, 'dla catalog table-type show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_table_type'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog table-type list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_table_types'), cf_dla_catalog)
+    with self.command_group('dla catalog table-type', dla_catalog_sdk) as g:
+        g.command('show', 'get_table_type')
+        g.command('list', 'list_table_types')
 
     # get table valued functions
-    cli_command(__name__, 'dla catalog tvf show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_table_valued_function'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog tvf list', 
adla_custom_format_path.format('list_catalog_tvfs'), cf_dla_catalog)
+    with self.command_group('dla catalog tvf', dla_catalog_sdk, 
client_factory=cf_dla_catalog) as g:
+        g.command('show', 'get_table_valued_function')
+        g.custom_command('list', 'list_catalog_tvfs')
 
     # get views
-    cli_command(__name__, 'dla catalog view show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_view'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog view list', 
adla_custom_format_path.format('list_catalog_views'), cf_dla_catalog)
+    with self.command_group('dla catalog view', dla_catalog_sdk, 
client_factory=cf_dla_catalog) as g:
+        g.command('show', 'get_view')
+        g.custom_command('list', 'list_catalog_views')
 
     # get packages
-    cli_command(__name__, 'dla catalog package show', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'get_package'), cf_dla_catalog)
-    cli_command(__name__, 'dla catalog package list', 
adla_format_path.format('catalog', 'catalog_operations', 'CatalogOperations', 
'list_packages'), cf_dla_catalog)
+    with self.command_group('dla catalog package', dla_catalog_sdk) as g:
+        g.command('show', 'get_package')
+        g.command('list', 'list_packages')
 
     # compute policy
-    cli_command(__name__, 'dla account compute-policy create', 
adla_custom_format_path.format('create_adla_compute_policy'), 
cf_dla_account_compute_policy)
-    cli_command(__name__, 'dla account compute-policy update', 
adla_custom_format_path.format('update_adla_compute_policy'), 
cf_dla_account_compute_policy)
-    cli_command(__name__, 'dla account compute-policy list', 
adla_format_path.format('account', 'compute_policies_operations', 
'ComputePoliciesOperations', 'list_by_account'), cf_dla_account_compute_policy)
-    cli_command(__name__, 'dla account compute-policy show', 
adla_format_path.format('account', 'compute_policies_operations', 
'ComputePoliciesOperations', 'get'), cf_dla_account_compute_policy)
-    cli_command(__name__, 'dla account compute-policy delete', 
adla_format_path.format('account', 'compute_policies_operations', 
'ComputePoliciesOperations', 'delete'), cf_dla_account_compute_policy)
+    with self.command_group('dla account compute-policy', 
dla_compute_policy_sdk, client_factory=cf_dla_account_compute_policy) as g:
+        g.custom_command('create', 'create_adla_compute_policy')
+        g.custom_command('update', 'update_adla_compute_policy')
+        g.command('list', 'list_by_account')
+        g.command('show', 'get')
+        g.command('delete', 'delete')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/custom.py 
new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/custom.py
--- old/azure-cli-dla-0.0.12/azure/cli/command_modules/dla/custom.py    
2017-09-23 01:47:00.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure/cli/command_modules/dla/custom.py    
2018-01-26 17:12:23.000000000 +0100
@@ -5,46 +5,25 @@
 import time
 import uuid
 
-from azure.cli.core.prompting import prompt_pass, NoTTYException
-from azure.mgmt.datalake.analytics.account.models import 
(DataLakeAnalyticsAccountUpdateParameters,
-                                                          FirewallRule,
-                                                          
DataLakeAnalyticsAccount,
-                                                          
DataLakeStoreAccountInfo,
-                                                          
ComputePolicyCreateOrUpdateParameters)
-
-from azure.mgmt.datalake.analytics.job.models import (JobType,
-                                                      JobState,
-                                                      JobInformation,
-                                                      USqlJobProperties,
-                                                      
JobRelationshipProperties)
+from knack.log import get_logger
+from knack.prompting import prompt_pass, NoTTYException
+from knack.util import CLIError
+
 # pylint: disable=line-too-long
-from azure.mgmt.datalake.analytics.catalog.models import 
(DataLakeAnalyticsCatalogCredentialCreateParameters,
-                                                          
DataLakeAnalyticsCatalogCredentialUpdateParameters)
 from azure.cli.core.commands.client_factory import get_mgmt_service_client
-from azure.cli.core.util import CLIError
-import azure.cli.core.azlogging as azlogging
 
-logger = azlogging.get_az_logger(__name__)
+logger = get_logger(__name__)
 
 
-# account customiaztions
+# region account
 def list_adla_account(client, resource_group_name=None):
     account_list = 
client.list_by_resource_group(resource_group_name=resource_group_name) \
         if resource_group_name else client.list()
     return list(account_list)
 
 
-def list_adla_jobs(client,
-                   account_name,
-                   top=500,
-                   name=None,
-                   submitter=None,
-                   submitted_after=None,
-                   submitted_before=None,
-                   state=None,
-                   result=None,
-                   pipeline_id=None,
-                   recurrence_id=None):
+def list_adla_jobs(client, account_name, top=500, name=None, submitter=None, 
submitted_after=None,
+                   submitted_before=None, state=None, result=None, 
pipeline_id=None, recurrence_id=None):
     odata_filter_list = []
     if submitter:
         odata_filter_list.append("submitter eq '{}'".format(submitter))
@@ -85,19 +64,12 @@
     return to_return
 
 
-def create_adla_account(client,
-                        resource_group_name,
-                        account_name,
-                        default_data_lake_store,
-                        location=None,
-                        tags=None,
-                        max_degree_of_parallelism=30,
-                        max_job_count=3,
-                        query_store_retention=30,
-                        tier=None):
+def create_adla_account(cmd, client, resource_group_name, account_name, 
default_data_lake_store, location=None,
+                        tags=None, max_degree_of_parallelism=30, 
max_job_count=3, query_store_retention=30, tier=None):
+    from azure.mgmt.datalake.analytics.account.models import 
DataLakeAnalyticsAccount, DataLakeStoreAccountInfo
     adls_list = list()
     adls_list.append(DataLakeStoreAccountInfo(default_data_lake_store))
-    location = location or _get_resource_group_location(resource_group_name)
+    location = location or _get_resource_group_location(cmd.cli_ctx, 
resource_group_name)
     create_params = DataLakeAnalyticsAccount(location,
                                              default_data_lake_store,
                                              adls_list,
@@ -110,16 +82,10 @@
     return client.create(resource_group_name, account_name, 
create_params).result()
 
 
-def update_adla_account(client,
-                        account_name,
-                        resource_group_name,
-                        tags=None,
-                        max_degree_of_parallelism=None,
-                        max_job_count=None,
-                        query_store_retention=None,
-                        tier=None,
-                        firewall_state=None,
+def update_adla_account(client, account_name, resource_group_name, tags=None, 
max_degree_of_parallelism=None,
+                        max_job_count=None, query_store_retention=None, 
tier=None, firewall_state=None,
                         allow_azure_ips=None):
+    from azure.mgmt.datalake.analytics.account.models import 
DataLakeAnalyticsAccountUpdateParameters
     update_params = DataLakeAnalyticsAccountUpdateParameters(
         tags=tags,
         max_degree_of_parallelism=max_degree_of_parallelism,
@@ -130,15 +96,11 @@
         firewall_allow_azure_ips=allow_azure_ips)
 
     return client.update(resource_group_name, account_name, 
update_params).result()
+# endregion
 
 
-# storage customizations
-def add_adla_blob_storage(client,
-                          account_name,
-                          storage_account_name,
-                          access_key,
-                          resource_group_name,
-                          suffix=None):
+# region storage
+def add_adla_blob_storage(client, account_name, storage_account_name, 
access_key, resource_group_name, suffix=None):
     return client.add(resource_group_name,
                       account_name,
                       storage_account_name,
@@ -146,42 +108,31 @@
                       suffix)
 
 
-def update_adla_blob_storage(client,
-                             account_name,
-                             storage_account_name,
-                             access_key,
-                             resource_group_name,
-                             suffix=None):
+def update_adla_blob_storage(client, account_name, storage_account_name, 
access_key, resource_group_name, suffix=None):
     return client.update(resource_group_name,
                          account_name,
                          storage_account_name,
                          access_key,
                          suffix)
+# endregion
 
 
-# firewall customizations
-def add_adla_firewall_rule(client,
-                           account_name,
-                           firewall_rule_name,
-                           start_ip_address,
-                           end_ip_address,
+# region firewall
+def add_adla_firewall_rule(client, account_name, firewall_rule_name, 
start_ip_address, end_ip_address,
                            resource_group_name):
+    from azure.mgmt.datalake.analytics.account.models import FirewallRule
     create_params = FirewallRule(start_ip_address, end_ip_address)
     return client.create_or_update(resource_group_name,
                                    account_name,
                                    firewall_rule_name,
                                    create_params)
+# endregion
 
 
-# compute policy customizations
-def create_adla_compute_policy(client,
-                               account_name,
-                               compute_policy_name,
-                               object_id,
-                               object_type,
-                               resource_group_name,
-                               max_dop_per_job=None,
-                               min_priority_per_job=None):
+# region compute policy
+def create_adla_compute_policy(client, account_name, compute_policy_name, 
object_id, object_type,
+                               resource_group_name, max_dop_per_job=None, 
min_priority_per_job=None):
+    from azure.mgmt.datalake.analytics.account.models import 
ComputePolicyCreateOrUpdateParameters
     if not max_dop_per_job and not min_priority_per_job:
         raise CLIError('Please specify at least one of --max-dop-per-job and 
--min-priority-per-job')
 
@@ -200,12 +151,8 @@
                                    create_params)
 
 
-def update_adla_compute_policy(client,
-                               account_name,
-                               compute_policy_name,
-                               resource_group_name,
-                               max_dop_per_job=None,
-                               min_priority_per_job=None):
+def update_adla_compute_policy(client, account_name, compute_policy_name, 
resource_group_name,
+                               max_dop_per_job=None, 
min_priority_per_job=None):
     if not max_dop_per_job and not min_priority_per_job:
         raise CLIError('Please specify at least one of --max-dop-per-job and 
--min-priority-per-job')
 
@@ -220,17 +167,13 @@
                          compute_policy_name,
                          max_dop_per_job,
                          min_priority_per_job)
+# endregion
 
 
-# catalog customizations
-def create_adla_catalog_credential(client,
-                                   account_name,
-                                   database_name,
-                                   credential_name,
-                                   credential_user_name,
-                                   uri,
+# region catalog
+def create_adla_catalog_credential(client, account_name, database_name, 
credential_name, credential_user_name, uri,
                                    credential_user_password=None):
-
+    from azure.mgmt.datalake.analytics.catalog.models import 
DataLakeAnalyticsCatalogCredentialCreateParameters
     if not credential_user_password:
         try:
             credential_user_password = prompt_pass('Password:', confirm=True)
@@ -244,14 +187,9 @@
     client.create_credential(account_name, database_name, credential_name, 
create_params)
 
 
-def update_adla_catalog_credential(client,
-                                   account_name,
-                                   database_name,
-                                   credential_name,
-                                   credential_user_name,
-                                   uri,
-                                   credential_user_password=None,
-                                   new_credential_user_password=None):
+def update_adla_catalog_credential(client, account_name, database_name, 
credential_name, credential_user_name, uri,
+                                   credential_user_password=None, 
new_credential_user_password=None):
+    from azure.mgmt.datalake.analytics.catalog.models import 
DataLakeAnalyticsCatalogCredentialUpdateParameters
     if not credential_user_password:
         try:
             credential_user_password = prompt_pass('Current Password:', 
confirm=True)
@@ -271,44 +209,32 @@
                                                                        uri,
                                                                        
credential_user_name)
     client.update_credential(account_name, database_name, credential_name, 
update_params)
+# endregion
 
 
-# customizations for listing catalog items that support multiple ancestor 
levels of listing.
-def list_catalog_tables(client,
-                        account_name,
-                        database_name,
-                        schema_name=None):
+# region catalog lists
+def list_catalog_tables(client, account_name, database_name, schema_name=None):
     if not schema_name:
         return client.list_tables_by_database(account_name, database_name)
 
     return client.list_tables(account_name, database_name, schema_name)
 
 
-def list_catalog_views(client,
-                       account_name,
-                       database_name,
-                       schema_name=None):
+def list_catalog_views(client, account_name, database_name, schema_name=None):
     if not schema_name:
         return client.list_views_by_database(account_name, database_name)
 
     return client.list_views(account_name, database_name, schema_name)
 
 
-def list_catalog_tvfs(client,
-                      account_name,
-                      database_name,
-                      schema_name=None):
+def list_catalog_tvfs(client, account_name, database_name, schema_name=None):
     if not schema_name:
         return client.list_table_valued_functions_by_database(account_name, 
database_name)
 
     return client.list_table_valued_functions(account_name, database_name, 
schema_name)
 
 
-def list_catalog_table_statistics(client,
-                                  account_name,
-                                  database_name,
-                                  schema_name=None,
-                                  table_name=None):
+def list_catalog_table_statistics(client, account_name, database_name, 
schema_name=None, table_name=None):
     if not schema_name and table_name:
         logger.warning('--table-name must be specified with --schema-name to 
be used. Defaulting to list all statistics in the database: %s', database_name)
 
@@ -319,61 +245,54 @@
         return 
client.list_table_statistics_by_database_and_schema(account_name, 
database_name, schema_name)
 
     return client.list_table_statistics(account_name, database_name, 
schema_name, table_name)
+# endregion
 
 
-# job customizations
-def submit_adla_job(client,
-                    account_name,
-                    job_name,
-                    script,
-                    runtime_version=None,
-                    compile_mode=None,
-                    compile_only=False,
-                    degree_of_parallelism=1,
-                    priority=1000,
-                    recurrence_id=None,
-                    recurrence_name=None,
-                    pipeline_id=None,
-                    pipeline_name=None,
-                    pipeline_uri=None,
-                    run_id=None):
+# region job
+def submit_adla_job(client, account_name, job_name, script, 
runtime_version=None, compile_mode=None, compile_only=False,
+                    degree_of_parallelism=1, priority=1000, 
recurrence_id=None, recurrence_name=None, pipeline_id=None,
+                    pipeline_name=None, pipeline_uri=None, run_id=None):
+    from azure.mgmt.datalake.analytics.job.models import (
+        JobType, CreateJobParameters, BuildJobParameters, 
CreateUSqlJobProperties, JobRelationshipProperties)
+
     if not script or len(script) < 1:
         # pylint: disable=line-too-long
         raise CLIError('Could not read script content from the supplied 
--script param. It is either empty or an invalid file')
 
-    job_properties = USqlJobProperties(script)
+    job_properties = CreateUSqlJobProperties(script)
     if runtime_version:
         job_properties.runtime_version = runtime_version
 
     if compile_mode:
         job_properties.compile_mode = compile_mode
 
-    submit_params = JobInformation(job_name,
-                                   JobType.usql,
-                                   job_properties,
-                                   degree_of_parallelism,
-                                   priority)
+    if compile_only:
+        build_params = BuildJobParameters(JobType.usql,
+                                          job_properties,
+                                          job_name)
+
+        return client.build(account_name, build_params)
+
+    create_params = CreateJobParameters(JobType.usql,
+                                        job_properties,
+                                        job_name,
+                                        degree_of_parallelism,
+                                        priority)
     if recurrence_id:
-        submit_params.related = JobRelationshipProperties(recurrence_id,
+        create_params.related = JobRelationshipProperties(recurrence_id,
                                                           pipeline_id,
                                                           pipeline_name,
                                                           pipeline_uri,
                                                           run_id,
                                                           recurrence_name)
 
-    if compile_only:
-        return client.build(account_name, submit_params)
-
     job_id = _get_uuid_str()
 
-    return client.create(account_name, job_id, submit_params)
+    return client.create(account_name, job_id, create_params)
 
 
-def wait_adla_job(client,
-                  account_name,
-                  job_id,
-                  wait_interval_sec=5,
-                  max_wait_time_sec=-1):
+def wait_adla_job(client, account_name, job_id, wait_interval_sec=5, 
max_wait_time_sec=-1):
+    from azure.mgmt.datalake.analytics.job.models import JobState
     if wait_interval_sec < 1:
         raise CLIError('wait times must be greater than 0 when polling jobs. 
Value specified: {}'
                        .format(wait_interval_sec))
@@ -389,6 +308,7 @@
         job = client.get(account_name, job_id)
 
     return job
+# endregion
 
 
 # helpers
@@ -396,8 +316,8 @@
     return str(uuid.uuid1())
 
 
-def _get_resource_group_location(resource_group_name):
+def _get_resource_group_location(cli_ctx, resource_group_name):
     from azure.mgmt.resource import ResourceManagementClient
-    client = get_mgmt_service_client(ResourceManagementClient)
+    client = get_mgmt_service_client(cli_ctx, ResourceManagementClient)
     # pylint: disable=no-member
     return client.resource_groups.get(resource_group_name).location
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-cli-dla-0.0.12/azure_cli_dla.egg-info/PKG-INFO 
new/azure-cli-dla-0.0.18/azure_cli_dla.egg-info/PKG-INFO
--- old/azure-cli-dla-0.0.12/azure_cli_dla.egg-info/PKG-INFO    2017-09-23 
01:48:50.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure_cli_dla.egg-info/PKG-INFO    2018-01-26 
17:12:45.000000000 +0100
@@ -1,12 +1,11 @@
 Metadata-Version: 1.1
 Name: azure-cli-dla
-Version: 0.0.12
+Version: 0.0.18
 Summary: Microsoft Azure Command-Line Tools Data Lake Analytics Command Module
 Home-page: https://github.com/Azure/azure-cli
 Author: Microsoft Corporation
 Author-email: azpy...@microsoft.com
 License: MIT
-Description-Content-Type: UNKNOWN
 Description: Microsoft Azure CLI 'data lake analytics' Command Module
         ========================================================
         
@@ -20,6 +19,33 @@
         
         Release History
         ===============
+        
+        0.0.18
+        ++++++
+        * Performance fixes.
+        
+        0.0.17
+        ++++++
+        * Update helpfile
+          
+        0.0.16
+        ++++++
+        * Update for CLI core changes.
+        
+        0.0.15
+        ++++++
+        * Change the return type of the job list command: a list of 
JobInformation to a list of JobInformationBasic
+        * Change the return type of the account list command: a list of 
DataLakeAnalyticsAccount to a list of DataLakeAnalyticsAccountBasic
+        * The properties of a Basic type is a strict subset of the properties 
of a regular type
+        
+        0.0.14
+        ++++++
+        * Minor fixes.
+        
+        0.0.13
+        ++++++
+        * minor fixes
+        
         0.0.12 (2017-09-22)
         +++++++++++++++++++
         * minor fixes
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-cli-dla-0.0.12/azure_cli_dla.egg-info/requires.txt 
new/azure-cli-dla-0.0.18/azure_cli_dla.egg-info/requires.txt
--- old/azure-cli-dla-0.0.12/azure_cli_dla.egg-info/requires.txt        
2017-09-23 01:48:50.000000000 +0200
+++ new/azure-cli-dla-0.0.18/azure_cli_dla.egg-info/requires.txt        
2018-01-26 17:12:45.000000000 +0100
@@ -1,4 +1,3 @@
-azure-mgmt-datalake-store==0.1.6
-azure-mgmt-datalake-analytics==0.1.6
+azure-mgmt-datalake-store==0.2.0
+azure-mgmt-datalake-analytics==0.2.0
 azure-cli-core
-azure-cli-command-modules-nspkg>=2.0.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-cli-dla-0.0.12/setup.py 
new/azure-cli-dla-0.0.18/setup.py
--- old/azure-cli-dla-0.0.12/setup.py   2017-09-23 01:48:50.000000000 +0200
+++ new/azure-cli-dla-0.0.18/setup.py   2018-01-26 17:12:23.000000000 +0100
@@ -16,7 +16,7 @@
     cmdclass = {}
 
 
-VERSION = "0.0.12"
+VERSION = "0.0.18"
 # The full list of classifiers is available at
 # https://pypi.python.org/pypi?%3Aaction=list_classifiers
 CLASSIFIERS = [
@@ -34,8 +34,8 @@
 ]
 
 DEPENDENCIES = [
-    'azure-mgmt-datalake-store==0.1.6',
-    'azure-mgmt-datalake-analytics==0.1.6',
+    'azure-mgmt-datalake-store==0.2.0',
+    'azure-mgmt-datalake-analytics==0.2.0',
     'azure-cli-core',
 ]
 


Reply via email to