Chad Smith has proposed merging ~chad.smith/cloud-init:cleanup/metadata-cloud-platform into cloud-init:master.
Commit message: ec2: update crawled metadata. Add standardized keys * refactor _crawl_metadata to return a dictionary * refactor _get_data to process crawled_data and cache ds._crawled_metadata * add v1.public_ssh_keys and v1.platform_type standardized instance data keys * drop ec2.cloud_platform method from aliyun and ec2 datasources - instead implement _get_cloud_name method Requested reviews: cloud-init committers (cloud-init-dev) For more details, see: https://code.launchpad.net/~chad.smith/cloud-init/+git/cloud-init/+merge/355999 -- Your team cloud-init committers is requested to review the proposed merge of ~chad.smith/cloud-init:cleanup/metadata-cloud-platform into cloud-init:master.
diff --git a/cloudinit/sources/DataSourceAliYun.py b/cloudinit/sources/DataSourceAliYun.py index 858e082..38ec44a 100644 --- a/cloudinit/sources/DataSourceAliYun.py +++ b/cloudinit/sources/DataSourceAliYun.py @@ -12,6 +12,7 @@ ALIYUN_PRODUCT = "Alibaba Cloud ECS" class DataSourceAliYun(EC2.DataSourceEc2): dsname = 'AliYun' + _platform_type = 'ec2' metadata_urls = ['http://100.100.100.200'] # The minimum supported metadata_version from the ec2 metadata apis @@ -28,15 +29,11 @@ class DataSourceAliYun(EC2.DataSourceEc2): def get_public_ssh_keys(self): return parse_public_keys(self.metadata.get('public-keys', {})) - @property - def cloud_platform(self): - if self._cloud_platform is None: - if _is_aliyun(): - self._cloud_platform = EC2.Platforms.ALIYUN - else: - self._cloud_platform = EC2.Platforms.NO_EC2_METADATA - - return self._cloud_platform + def _get_cloud_name(self): + if _is_aliyun(): + return EC2.CloudNames.ALIYUN + else: + return EC2.CloudNames.NO_EC2_METADATA def _is_aliyun(): diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 968ab3f..482eb21 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -28,18 +28,17 @@ STRICT_ID_PATH = ("datasource", "Ec2", "strict_id") STRICT_ID_DEFAULT = "warn" -class Platforms(object): - # TODO Rename and move to cloudinit.cloud.CloudNames - ALIYUN = "AliYun" - AWS = "AWS" - BRIGHTBOX = "Brightbox" - SEEDED = "Seeded" +class CloudNames(object): + ALIYUN = "aliyun" + AWS = "aws" + BRIGHTBOX = "brightbox" + SEEDED = "ec2-Seeded" # UNKNOWN indicates no positive id. If strict_id is 'warn' or 'false', # then an attempt at the Ec2 Metadata service will be made. - UNKNOWN = "Unknown" + UNKNOWN = "unknown" # NO_EC2_METADATA indicates this platform does not have a Ec2 metadata # service available. No attempt at the Ec2 Metadata service will be made. 
- NO_EC2_METADATA = "No-EC2-Metadata" + NO_EC2_METADATA = "no-ec2-metadata" class DataSourceEc2(sources.DataSource): @@ -61,8 +60,6 @@ class DataSourceEc2(sources.DataSource): url_max_wait = 120 url_timeout = 50 - _cloud_platform = None - _network_config = sources.UNSET # Used to cache calculated network cfg v1 # Whether we want to get network configuration from the metadata service. @@ -75,7 +72,7 @@ class DataSourceEc2(sources.DataSource): def _get_cloud_name(self): """Return the cloud name as identified during _get_data.""" - return self.cloud_platform + return identify_platform() def _get_data(self): seed_ret = {} @@ -83,7 +80,7 @@ class DataSourceEc2(sources.DataSource): self.userdata_raw = seed_ret['user-data'] self.metadata = seed_ret['meta-data'] LOG.debug("Using seeded ec2 data from %s", self.seed_dir) - self._cloud_platform = Platforms.SEEDED + self._cloud_name = CloudNames.SEEDED return True strict_mode, _sleep = read_strict_mode( @@ -91,10 +88,10 @@ class DataSourceEc2(sources.DataSource): STRICT_ID_DEFAULT), ("warn", None)) LOG.debug("strict_mode: %s, cloud_platform=%s", - strict_mode, self.cloud_platform) - if strict_mode == "true" and self.cloud_platform == Platforms.UNKNOWN: + strict_mode, self.cloud_name) + if strict_mode == "true" and self.cloud_name == CloudNames.UNKNOWN: return False - elif self.cloud_platform == Platforms.NO_EC2_METADATA: + elif self.cloud_name == CloudNames.NO_EC2_METADATA: return False if self.perform_dhcp_setup: # Setup networking in init-local stage. 
@@ -103,13 +100,20 @@ class DataSourceEc2(sources.DataSource): return False try: with EphemeralDHCPv4(self.fallback_interface): - return util.log_time( + self._crawled_metadata = util.log_time( logfunc=LOG.debug, msg='Crawl of metadata service', - func=self._crawl_metadata) + func=self.crawl_metadata) except NoDHCPLeaseError: return False else: - return self._crawl_metadata() + self._crawled_metadata = self.crawl_metadata() + if not self._crawled_metadata: + return False + self.metadata = self._crawled_metadata.get('meta-data', None) + self.userdata_raw = self._crawled_metadata.get('user-data', None) + self.identity = self._crawled_metadata.get( + 'dynamic', {}).get('instance-identity', {}).get('document', {}) + return True @property def launch_index(self): @@ -144,7 +148,7 @@ class DataSourceEc2(sources.DataSource): return self.min_metadata_version def get_instance_id(self): - if self.cloud_platform == Platforms.AWS: + if self.cloud_name == CloudNames.AWS: # Prefer the ID from the instance identity document, but fall back if not getattr(self, 'identity', None): # If re-using cached datasource, it's get_data run didn't @@ -254,7 +258,7 @@ class DataSourceEc2(sources.DataSource): @property def availability_zone(self): try: - if self.cloud_platform == Platforms.AWS: + if self.cloud_name == CloudNames.AWS: return self.identity.get( 'availabilityZone', self.metadata['placement']['availability-zone']) @@ -265,7 +269,7 @@ class DataSourceEc2(sources.DataSource): @property def region(self): - if self.cloud_platform == Platforms.AWS: + if self.cloud_name == CloudNames.AWS: region = self.identity.get('region') # Fallback to trimming the availability zone if region is missing if self.availability_zone and not region: @@ -277,16 +281,10 @@ class DataSourceEc2(sources.DataSource): return az[:-1] return None - @property - def cloud_platform(self): # TODO rename cloud_name - if self._cloud_platform is None: - self._cloud_platform = identify_platform() - return 
self._cloud_platform - def activate(self, cfg, is_new_instance): if not is_new_instance: return - if self.cloud_platform == Platforms.UNKNOWN: + if self.cloud_name == CloudNames.UNKNOWN: warn_if_necessary( util.get_cfg_by_path(cfg, STRICT_ID_PATH, STRICT_ID_DEFAULT), cfg) @@ -306,13 +304,11 @@ class DataSourceEc2(sources.DataSource): result = None no_network_metadata_on_aws = bool( 'network' not in self.metadata and - self.cloud_platform == Platforms.AWS) + self.cloud_name == CloudNames.AWS) if no_network_metadata_on_aws: LOG.debug("Metadata 'network' not present:" " Refreshing stale metadata from prior to upgrade.") - util.log_time( - logfunc=LOG.debug, msg='Re-crawl of metadata service', - func=self._crawl_metadata) + self.get_data() # Limit network configuration to only the primary/fallback nic iface = self.fallback_interface @@ -340,28 +336,33 @@ class DataSourceEc2(sources.DataSource): return super(DataSourceEc2, self).fallback_interface return self._fallback_interface - def _crawl_metadata(self): + def crawl_metadata(self): """Crawl metadata service when available. - @returns: True on success, False otherwise. + @returns: Dictionary of crawled metadata content containing the keys: meta-data, user-data and dynamic. 
""" + userdata = metadata = None if not self.wait_for_metadata_service(): - return False + return {} api_version = self.get_metadata_api_version() + crawled_metadata = {} try: - self.userdata_raw = ec2.get_instance_userdata( + crawled_metadata['user-data'] = ec2.get_instance_userdata( api_version, self.metadata_address) - self.metadata = ec2.get_instance_metadata( + crawled_metadata['meta-data'] = ec2.get_instance_metadata( api_version, self.metadata_address) - if self.cloud_platform == Platforms.AWS: - self.identity = ec2.get_instance_identity( - api_version, self.metadata_address).get('document', {}) + if self.cloud_name == CloudNames.AWS: + identity = ec2.get_instance_identity( + api_version, self.metadata_address) + crawled_metadata['dynamic'] = {'instance-identity': identity} except Exception: util.logexc( LOG, "Failed reading from metadata address %s", self.metadata_address) - return False - return True + return {} + crawled_metadata['_metadata_api_version'] = api_version + return crawled_metadata class DataSourceEc2Local(DataSourceEc2): @@ -375,10 +376,10 @@ class DataSourceEc2Local(DataSourceEc2): perform_dhcp_setup = True # Use dhcp before querying metadata def get_data(self): - supported_platforms = (Platforms.AWS,) - if self.cloud_platform not in supported_platforms: + supported_platforms = (CloudNames.AWS,) + if self.cloud_name not in supported_platforms: LOG.debug("Local Ec2 mode only supported on %s, not %s", - supported_platforms, self.cloud_platform) + supported_platforms, self.cloud_name) return False return super(DataSourceEc2Local, self).get_data() @@ -439,20 +440,20 @@ def identify_aws(data): if (data['uuid'].startswith('ec2') and (data['uuid_source'] == 'hypervisor' or data['uuid'] == data['serial'])): - return Platforms.AWS + return CloudNames.AWS return None def identify_brightbox(data): if data['serial'].endswith('brightbox.com'): - return Platforms.BRIGHTBOX + return CloudNames.BRIGHTBOX def identify_platform(): - # identify the platform 
and return an entry in Platforms. + # identify the platform and return an entry in CloudNames. data = _collect_platform_data() - checks = (identify_aws, identify_brightbox, lambda x: Platforms.UNKNOWN) + checks = (identify_aws, identify_brightbox, lambda x: CloudNames.UNKNOWN) for checker in checks: try: result = checker(data) diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 5ac9882..e772fe4 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -133,6 +133,9 @@ class DataSource(object): # Cached cloud_name as determined by _get_cloud_name _cloud_name = None + # The cloud platform api type: e.g. ec2, openstack, kvm, lxc + _platform_type = None + # Track the discovered fallback nic for use in configuration generation. _fallback_interface = None @@ -192,16 +195,14 @@ class DataSource(object): local_hostname = self.get_hostname() instance_id = self.get_instance_id() availability_zone = self.availability_zone - cloud_name = self.cloud_name - # When adding new standard keys prefer underscore-delimited instead - # of hyphen-delimted to support simple variable references in jinja - # templates. return { 'v1': { 'availability-zone': availability_zone, 'availability_zone': availability_zone, - 'cloud-name': cloud_name, - 'cloud_name': cloud_name, + 'cloud-name': self.cloud_name, + 'cloud_name': self.cloud_name, + 'platform_type': self.platform_type, + 'public_ssh_keys': self.get_public_ssh_keys(), 'instance-id': instance_id, 'instance_id': instance_id, 'local-hostname': local_hostname, @@ -247,19 +248,26 @@ class DataSource(object): @return True on successful write, False otherwise. 
""" - instance_data = { - 'ds': {'_doc': EXPERIMENTAL_TEXT, - 'meta_data': self.metadata}} - if hasattr(self, 'network_json'): - network_json = getattr(self, 'network_json') - if network_json != UNSET: - instance_data['ds']['network_json'] = network_json - if hasattr(self, 'ec2_metadata'): - ec2_metadata = getattr(self, 'ec2_metadata') - if ec2_metadata != UNSET: - instance_data['ds']['ec2_metadata'] = ec2_metadata + if hasattr(self, '_crawled_metadata'): + # Any datasource with _crawled_metadata will best represent + # most recent, 'raw' metadata + crawled_metadata = copy.deepcopy(self._crawled_metadata) + crawled_metadata.pop('user-data', None) + crawled_metadata.pop('vendor-data', None) + instance_data = {'ds': crawled_metadata} + else: + instance_data = {'ds': {'meta_data': self.metadata}} + if hasattr(self, 'network_json'): + network_json = getattr(self, 'network_json') + if network_json != UNSET: + instance_data['ds']['network_json'] = network_json + if hasattr(self, 'ec2_metadata'): + ec2_metadata = getattr(self, 'ec2_metadata') + if ec2_metadata != UNSET: + instance_data['ds']['ec2_metadata'] = ec2_metadata instance_data.update( self._get_standardized_metadata()) + instance_data['ds']['_doc'] = EXPERIMENTAL_TEXT try: # Process content base64encoding unserializable values content = util.json_dumps(instance_data) @@ -347,6 +355,12 @@ class DataSource(object): return self._fallback_interface @property + def platform_type(self): + if self._platform_type: + return self._platform_type + return self.dsname.lower() + + @property def cloud_name(self): """Return lowercase cloud name as determined by the datasource. 
diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py index 8082019..d40a6e8 100644 --- a/cloudinit/sources/tests/test_init.py +++ b/cloudinit/sources/tests/test_init.py @@ -303,6 +303,8 @@ class TestDataSource(CiTestCase): 'instance_id': 'iid-datasource', 'local-hostname': 'test-subclass-hostname', 'local_hostname': 'test-subclass-hostname', + 'platform_type': 'mytestsubclass', + 'public_ssh_keys': [], 'region': 'myregion'}, 'ds': { '_doc': EXPERIMENTAL_TEXT, @@ -347,6 +349,8 @@ class TestDataSource(CiTestCase): 'instance_id': 'iid-datasource', 'local-hostname': 'test-subclass-hostname', 'local_hostname': 'test-subclass-hostname', + 'platform_type': 'mytestsubclass', + 'public_ssh_keys': [], 'region': 'myregion'}, 'ds': { '_doc': EXPERIMENTAL_TEXT, @@ -356,7 +360,6 @@ class TestDataSource(CiTestCase): 'region': 'myregion', 'some': {'security-credentials': REDACT_SENSITIVE_VALUE}}} } - self.maxDiff = None self.assertEqual(expected, util.load_json(content)) file_stat = os.stat(sensitive_json_file) self.assertEqual(0o600, stat.S_IMODE(file_stat.st_mode)) diff --git a/doc/rtd/topics/instancedata.rst b/doc/rtd/topics/instancedata.rst index 634e180..c1eed2e 100644 --- a/doc/rtd/topics/instancedata.rst +++ b/doc/rtd/topics/instancedata.rst @@ -102,6 +102,12 @@ The standardized keys present: | v1.local_hostname | The internal or local hostname of the system | ip-10-41-41-70, | | | | <user-provided-hostname> | +----------------------+-----------------------------------------------+---------------------------+ +| v1.platform_type | The cloud platform or metadata api type | ec2, openstack, seed-dir | +| | | | ++----------------------+-----------------------------------------------+---------------------------+ +| v1.public_ssh_keys | A list of ssh keys provided to the instance | ['ssh-rsa AA...', ...] | +| | by the datasource metadata. 
| | ++----------------------+-----------------------------------------------+---------------------------+ | v1.region | The physical region/datacenter in which the | us-east-2 | | | instance is deployed | | +----------------------+-----------------------------------------------+---------------------------+ @@ -117,10 +123,10 @@ instance: { "base64_encoded_keys": [], - "sensitive_keys": [], "ds": { + "_doc": "EXPERIMENTAL: The structure and format of content scoped under the 'ds' key may change in subsequent releases of cloud-init.", "meta_data": { - "ami-id": "ami-014e1416b628b0cbf", + "ami-id": "ami-0ea031f8786f57527", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { @@ -129,31 +135,31 @@ instance: "ephemeral1": "sdc", "root": "/dev/sda1" }, - "hostname": "ip-10-41-41-70.us-east-2.compute.internal", + "hostname": "ip-10-41-41-153.us-east-2.compute.internal", "instance-action": "none", - "instance-id": "i-04fa31cfc55aa7976", + "instance-id": "i-026b5bf335079dfa6", "instance-type": "t2.micro", - "local-hostname": "ip-10-41-41-70.us-east-2.compute.internal", - "local-ipv4": "10.41.41.70", - "mac": "06:b6:92:dd:9d:24", + "local-hostname": "ip-10-41-41-153.us-east-2.compute.internal", + "local-ipv4": "10.41.41.153", + "mac": "06:97:5c:d1:35:e8", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { - "06:b6:92:dd:9d:24": { + "06:97:5c:d1:35:e8": { "device-number": "0", - "interface-id": "eni-08c0c9fdb99b6e6f4", + "interface-id": "eni-05ec416d627d96e3b", "ipv4-associations": { - "18.224.22.43": "10.41.41.70" + "18.224.169.56": "10.41.41.153" }, - "local-hostname": "ip-10-41-41-70.us-east-2.compute.internal", - "local-ipv4s": "10.41.41.70", - "mac": "06:b6:92:dd:9d:24", + "local-hostname": "ip-10-41-41-153.us-east-2.compute.internal", + "local-ipv4s": "10.41.41.153", + "mac": "06:97:5c:d1:35:e8", "owner-id": "437526006925", - "public-hostname": 
"ec2-18-224-22-43.us-east-2.compute.amazonaws.com", - "public-ipv4s": "18.224.22.43", + "public-hostname": "ec2-18-224-169-56.us-east-2.compute.amazonaws.com", + "public-ipv4s": "18.224.169.56", "security-group-ids": "sg-828247e9", "security-groups": "Cloud-init integration test secgroup", "subnet-id": "subnet-282f3053", @@ -171,16 +177,14 @@ instance: "availability-zone": "us-east-2b" }, "profile": "default-hvm", - "public-hostname": "ec2-18-224-22-43.us-east-2.compute.amazonaws.com", - "public-ipv4": "18.224.22.43", + "public-hostname": "ec2-18-224-169-56.us-east-2.compute.amazonaws.com", + "public-ipv4": "18.224.169.56", "public-keys": { "cloud-init-integration": [ - "ssh-rsa - AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB - cloud-init-integration" + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB cloud-init-integration" ] }, - "reservation-id": "r-06ab75e9346f54333", + "reservation-id": "r-0e848e8c38328dbc2", "security-groups": "Cloud-init integration test secgroup", "services": { "domain": "amazonaws.com", @@ -188,20 +192,22 @@ instance: } } }, + "sensitive_keys": [], "v1": { - "availability-zone": "us-east-2b", "availability_zone": "us-east-2b", - "cloud-name": "aws", "cloud_name": "aws", - "instance-id": "i-04fa31cfc55aa7976", - "instance_id": "i-04fa31cfc55aa7976", - "local-hostname": "ip-10-41-41-70", - 
"local_hostname": "ip-10-41-41-70", + "instance_id": "i-026b5bf335079dfa6", + "local_hostname": "ip-10-41-41-153", + "platform_type": "ec2", + "public_ssh_keys": [ + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB cloud-init-integration" + ], "region": "us-east-2" } } + Using instance-data =================== diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py index 497e761..f6b4975 100644 --- a/tests/unittests/test_datasource/test_ec2.py +++ b/tests/unittests/test_datasource/test_ec2.py @@ -351,7 +351,9 @@ class TestEc2(test_helpers.HttprettyTestCase): m_get_interface_mac.return_value = mac1 nc = ds.network_config # Will re-crawl network metadata self.assertIsNotNone(nc) - self.assertIn('Re-crawl of metadata service', self.logs.getvalue()) + self.assertIn( + 'Refreshing stale metadata from prior to upgrade', + self.logs.getvalue()) expected = {'version': 1, 'config': [ {'mac_address': '06:17:04:d7:26:09', 'name': 'eth9', @@ -386,7 +388,7 @@ class TestEc2(test_helpers.HttprettyTestCase): register_mock_metaserver( '{0}/{1}/dynamic/'.format(ds.metadata_address, all_versions[-1]), DYNAMIC_METADATA) - ds._cloud_platform = ec2.Platforms.AWS + ds._cloud_name = ec2.CloudNames.AWS # Setup cached metadata on the Datasource ds.metadata = DEFAULT_METADATA self.assertEqual('my-identity-id', ds.get_instance_id()) @@ -439,16 +441,16 @@ class TestEc2(test_helpers.HttprettyTestCase): sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, md=DEFAULT_METADATA) platform_attrs = [ - attr for attr in ec2.Platforms.__dict__.keys() + attr for attr in ec2.CloudNames.__dict__.keys() if not attr.startswith('__')] 
for attr_name in platform_attrs: - platform_name = getattr(ec2.Platforms, attr_name) - if platform_name != 'AWS': - ds._cloud_platform = platform_name + platform_name = getattr(ec2.CloudNames, attr_name) + if platform_name != 'aws': + ds._cloud_name = platform_name ret = ds.get_data() self.assertFalse(ret) message = ( - "Local Ec2 mode only supported on ('AWS',)," + "Local Ec2 mode only supported on ('aws',)," ' not {0}'.format(platform_name)) self.assertIn(message, self.logs.getvalue())
_______________________________________________ Mailing list: https://launchpad.net/~cloud-init-dev Post to : cloud-init-dev@lists.launchpad.net Unsubscribe : https://launchpad.net/~cloud-init-dev More help : https://help.launchpad.net/ListHelp