Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package cloud-init for openSUSE:Factory checked in at 2023-07-18 21:53:44
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/cloud-init (Old)
 and      /work/SRC/openSUSE:Factory/.cloud-init.new.3193 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "cloud-init" Tue Jul 18 21:53:44 2023 rev:88 rq:1098540 version:23.1 Changes: -------- --- /work/SRC/openSUSE:Factory/cloud-init/cloud-init.changes 2023-03-02 23:01:54.826732649 +0100 +++ /work/SRC/openSUSE:Factory/.cloud-init.new.3193/cloud-init.changes 2023-07-18 21:53:45.430172722 +0200 @@ -1,0 +2,26 @@ +Thu Jul 6 12:06:22 UTC 2023 - Robert Schweikert <[email protected]> + +- Update cloud-init-write-routes.patch (bsc#1212879) + + Add necessary import statement +- Enable flake8 linting, fix up patches + + cloud-init-cve-2023-1786-redact-instance-data-json-main.patch + + cloud-init-power-rhel-only.patch + + cloud-init-write-routes.patch + + datasourceLocalDisk.patch + +------------------------------------------------------------------- +Thu Apr 27 12:22:11 UTC 2023 - Robert Schweikert <[email protected]> + +- Add cloud-init-power-rhel-only.patch (bsc#1210273) + + Config module cc_refresh_rmc_and_interface is implemented such that + it will only work on RH distros. Set the module availability accordingly. + +------------------------------------------------------------------- +Tue Apr 11 18:48:30 UTC 2023 - Robert Schweikert <[email protected]> + +- Sensitive data exposure (bsc#1210277, CVE-2023-1786) + + Add hidesensitivedata + + Add cloud-init-cve-2023-1786-redact-inst-data.patch + + Do not expose sensitive data gathered from the CSP + +------------------------------------------------------------------- @@ -1645 +1671 @@ -- Add cloud-init-no-pwd-in-log.patch (bsc#1184758) +- Add cloud-init-no-pwd-in-log.patch (bsc#1184758, CVE-2021-3429) New: ---- cloud-init-cve-2023-1786-redact-instance-data-json-main.patch cloud-init-power-rhel-only.patch hidesensitivedata ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ cloud-init.spec ++++++ --- /var/tmp/diff_new_pack.pCrCuN/_old 2023-07-18 21:53:47.686185171 +0200 +++ /var/tmp/diff_new_pack.pCrCuN/_new 2023-07-18 21:53:47.746185502 +0200 @@ -26,6 +26,7 @@ Group: System/Management Source0: %{name}-%{version}.tar.gz Source1: rsyslog-cloud-init.cfg +Source2: hidesensitivedata Patch1: datasourceLocalDisk.patch # FIXME (lp#1849296) Patch2: cloud-init-break-resolv-symlink.patch @@ -37,6 +38,9 @@ Patch5: cloud-init-fix-ca-test.patch # FIXME (lp#1812117) Patch6: cloud-init-write-routes.patch +Patch7: cloud-init-cve-2023-1786-redact-instance-data-json-main.patch +# FIXME https://github.com/canonical/cloud-init/pull/2148 +Patch8: cloud-init-power-rhel-only.patch BuildRequires: fdupes BuildRequires: filesystem # pkg-config is needed to find correct systemd unit dir @@ -50,6 +54,7 @@ BuildRequires: python3-Jinja2 BuildRequires: python3-PyYAML BuildRequires: python3-configobj >= 5.0.2 +BuildRequires: python3-flake8 BuildRequires: python3-httpretty BuildRequires: python3-jsonpatch BuildRequires: python3-jsonschema @@ -84,7 +89,6 @@ Requires: python3-pyserial Requires: python3-PyYAML Requires: python3-requests -Requires: python3-responses Requires: python3-serial Requires: python3-setuptools Requires: python3-xml @@ -142,6 +146,8 @@ %patch4 %patch5 %patch6 +%patch7 +%patch8 # patch in the full version to version.py version_pys=$(find . 
 version_pys=$(find . -name version.py -type f)
@@ -154,6 +160,7 @@

 %check
 make unittest
+make flake8

 %install
 python3 setup.py install --root=%{buildroot} --prefix=%{_prefix} --install-lib=%{python3_sitelib} --init-system=%{initsys}
@@ -185,6 +192,8 @@
 mkdir -p %{buildroot}/usr/lib/udev/rules.d/
 cp -a %{SOURCE1} %{buildroot}/%{_sysconfdir}/rsyslog.d/21-cloudinit.conf
 mv %{buildroot}/lib/udev/rules.d/66-azure-ephemeral.rules %{buildroot}/usr/lib/udev/rules.d/
+mkdir -p %{buildroot}%{_sbindir}
+install -m 755 %{SOURCE2} %{buildroot}%{_sbindir}

 # remove debian/ubuntu specific profile.d file (bnc#779553)
 rm -f %{buildroot}%{_sysconfdir}/profile.d/Z99-cloud-locale-test.sh
@@ -194,6 +203,9 @@
 rm %{buildroot}/%{_sysconfdir}/cloud/templates/*.redhat.*
 rm %{buildroot}/%{_sysconfdir}/cloud/templates/*.ubuntu.*

+%post
+/usr/sbin/hidesensitivedata
+
 # remove duplicate files
 %if 0%{?suse_version}
 %fdupes %{buildroot}%{python3_sitelib}
@@ -205,6 +217,7 @@
 %{_bindir}/cloud-id
 %{_bindir}/cloud-init
 %{_bindir}/cloud-init-per
+%{_sbindir}/hidesensitivedata
 %dir %{_sysconfdir}/cloud
 %dir %{_sysconfdir}/cloud/clean.d
 %{_sysconfdir}/cloud/clean.d/README

++++++ cloud-init-cve-2023-1786-redact-instance-data-json-main.patch ++++++
--- cloudinit/sources/DataSourceLXD.py.orig
+++ cloudinit/sources/DataSourceLXD.py
@@ -173,6 +173,8 @@ class DataSourceLXD(sources.DataSource):
         "user.meta-data",
         "user.vendor-data",
         "user.user-data",
+        "cloud-init.user-data",
+        "cloud-init.vendor-data",
     )

     skip_hotplug_detect = True
--- cloudinit/sources/DataSourceVultr.py.orig
+++ cloudinit/sources/DataSourceVultr.py
@@ -5,6 +5,8 @@
 # Vultr Metadata API:
 # https://www.vultr.com/metadata/

+from typing import Tuple
+
 import cloudinit.sources.helpers.vultr as vultr
 from cloudinit import log as log
 from cloudinit import sources, util, version
@@ -27,6 +29,9 @@ BUILTIN_DS_CONFIG = {
 class DataSourceVultr(sources.DataSource):

     dsname = "Vultr"
+    sensitive_metadata_keys: \
+        Tuple[str, ...] = \
+        sources.DataSource.sensitive_metadata_keys + ("startup-script",)

     def __init__(self, sys_cfg, distro, paths):
         super(DataSourceVultr, self).__init__(sys_cfg, distro, paths)
@@ -54,13 +59,8 @@ class DataSourceVultr(sources.DataSource
         self.get_datasource_data(self.metadata)

         # Dump some data so diagnosing failures is manageable
-        LOG.debug("Vultr Vendor Config:")
-        LOG.debug(util.json_dumps(self.metadata["vendor-data"]))
         LOG.debug("SUBID: %s", self.metadata["instance-id"])
         LOG.debug("Hostname: %s", self.metadata["local-hostname"])
-        if self.userdata_raw is not None:
-            LOG.debug("User-Data:")
-            LOG.debug(self.userdata_raw)

         return True
@@ -146,7 +146,4 @@ if __name__ == "__main__":
     config = md["vendor-data"]
     sysinfo = vultr.get_sysinfo()

-    print(util.json_dumps(sysinfo))
-    print(util.json_dumps(config))
-
 # vi: ts=4 expandtab
--- cloudinit/sources/__init__.py.orig
+++ cloudinit/sources/__init__.py
@@ -132,6 +132,12 @@ def redact_sensitive_keys(metadata, reda

     Replace any keys values listed in 'sensitive_keys' with redact_value.
     """
+    # While 'sensitive_keys' should already sanitized to only include what
+    # is in metadata, it is possible keys will overlap. For example, if
+    # "merged_cfg" and "merged_cfg/ds/userdata" both match, it's possible that
+    # "merged_cfg" will get replaced first, meaning "merged_cfg/ds/userdata"
+    # no longer represents a valid key.
+    # Thus, we still need to do membership checks in this function.
     if not metadata.get("sensitive_keys", []):
         return metadata
     md_copy = copy.deepcopy(metadata)
@@ -139,9 +145,14 @@ def redact_sensitive_keys(metadata, reda
         path_parts = key_path.split("/")
         obj = md_copy
         for path in path_parts:
-            if isinstance(obj[path], dict) and path != path_parts[-1]:
+            if (
+                path in obj
+                and isinstance(obj[path], dict)
+                and path != path_parts[-1]
+            ):
                 obj = obj[path]
-        obj[path] = redact_value
+        if path in obj:
+            obj[path] = redact_value

     return md_copy
@@ -249,6 +260,14 @@ class DataSource(CloudInitPickleMixin, m
     sensitive_metadata_keys: Tuple[str, ...] = (
         "merged_cfg",
         "security-credentials",
+        "userdata",
+        "user-data",
+        "user_data",
+        "vendordata",
+        "vendor-data",
+        # Provide ds/vendor_data to avoid redacting top-level
+        # "vendor_data": {enabled: True}
+        "ds/vendor_data",
     )

     # True on datasources that may not see hotplugged devices reflected
--- cloudinit/stages.py.orig
+++ cloudinit/stages.py
@@ -203,7 +203,9 @@ class Init:
         util.ensure_dirs(self._initial_subdirs())
         log_file = util.get_cfg_option_str(self.cfg, "def_log_file")
         if log_file:
-            util.ensure_file(log_file, mode=0o640, preserve_mode=True)
+            # At this point the log file should have already been created
+            # in the setupLogging function of log.py
+            util.ensure_file(log_file, mode=0o640, preserve_mode=False)
         perms = self.cfg.get("syslog_fix_perms")
         if not perms:
             perms = {}
--- tests/unittests/sources/test_init.py.orig
+++ tests/unittests/sources/test_init.py
@@ -464,6 +464,12 @@ class TestDataSource(CiTestCase):
             (
                 "merged_cfg",
                 "security-credentials",
+                "userdata",
+                "user-data",
+                "user_data",
+                "vendordata",
+                "vendor-data",
+                "ds/vendor_data",
             ),
             datasource.sensitive_metadata_keys,
         )
@@ -574,6 +580,12 @@
             (
                 "merged_cfg",
                 "security-credentials",
+                "userdata",
+                "user-data",
+                "user_data",
+                "vendordata",
+                "vendor-data",
+                "ds/vendor_data",
             ),
             datasource.sensitive_metadata_keys,
         )
--- tests/unittests/test_stages.py.orig
+++ tests/unittests/test_stages.py
@@ -606,19 +606,23 @@ class TestInit_InitializeFilesystem:
         # Assert we create it 0o640 by default if it doesn't already exist
         assert 0o640 == stat.S_IMODE(log_file.stat().mode)

-    def test_existing_file_permissions_are_not_modified(self, init, tmpdir):
-        """If the log file already exists, we should not modify its permissions
+    def test_existing_file_permissions(self, init, tmpdir):
+        """Test file permissions are set as expected.
+
+        CIS Hardening requires 640 permissions. These permissions are
+        currently hardcoded on every boot, but if there's ever a reason
+        to change this, we need to then ensure that they
+        are *not* set every boot.

         See https://bugs.launchpad.net/cloud-init/+bug/1900837.
""" - # Use a mode that will never be made the default so this test will - # always be valid - mode = 0o606 log_file = tmpdir.join("cloud-init.log") log_file.ensure() - log_file.chmod(mode) + # Use a mode that will never be made the default so this test will + # always be valid + log_file.chmod(0o606) init._cfg = {"def_log_file": str(log_file)} init._initialize_filesystem() - assert mode == stat.S_IMODE(log_file.stat().mode) + assert 0o640 == stat.S_IMODE(log_file.stat().mode) ++++++ cloud-init-power-rhel-only.patch ++++++ --- cloudinit/config/cc_refresh_rmc_and_interface.py.orig +++ cloudinit/config/cc_refresh_rmc_and_interface.py @@ -15,7 +15,6 @@ from cloudinit import netinfo, subp, uti from cloudinit.cloud import Cloud from cloudinit.config import Config from cloudinit.config.schema import MetaSchema -from cloudinit.distros import ALL_DISTROS from cloudinit.settings import PER_ALWAYS MODULE_DESCRIPTION = """\ @@ -42,7 +41,7 @@ meta: MetaSchema = { "name": "Refresh IPv6 Interface and RMC", "title": "Ensure Network Manager is not managing IPv6 interface", "description": MODULE_DESCRIPTION, - "distros": [ALL_DISTROS], + "distros": ["fedora", "rhel"], "frequency": PER_ALWAYS, "examples": [], "activate_by_schema_keys": [], ++++++ cloud-init-write-routes.patch ++++++ --- /var/tmp/diff_new_pack.pCrCuN/_old 2023-07-18 21:53:48.210188063 +0200 +++ /var/tmp/diff_new_pack.pCrCuN/_new 2023-07-18 21:53:48.234188196 +0200 @@ -18,7 +18,15 @@ if bring_up: --- cloudinit/distros/opensuse.py.orig +++ cloudinit/distros/opensuse.py -@@ -238,6 +238,143 @@ class Distro(distros.Distro): +@@ -15,6 +15,7 @@ from cloudinit import log as logging + from cloudinit import subp, util + from cloudinit.distros import rhel_util as rhutil + from cloudinit.distros.parsers.hostname import HostnameConf ++from cloudinit.net import ipv4_mask_to_net_prefix + from cloudinit.settings import PER_INSTANCE + + LOG = logging.getLogger(__name__) +@@ -238,6 +239,144 @@ class Distro(distros.Distro): conf.set_hostname(hostname) util.write_file(filename, str(conf), 0o644) @@ -72,10 +80,10 @@ + ) + continue + if ( -+ dest == 'default' and -+ has_default_route and -+ gateway == seen_default_gateway -+ ): ++ dest == 'default' ++ and has_default_route ++ and gateway == seen_default_gateway ++ ): + dest_info = dest + if gateway: + dest_info = ' '.join([dest, gateway, '-', '-']) @@ -138,9 +146,10 @@ + for route in device_config.get('routes', []): + config_routes += self._render_route_string(route) + if config_routes: -+ route_file = '/etc/sysconfig/network/ifroute-{}'.format( -+ device_name -+ ) ++ route_file = \ ++ '/etc/sysconfig/network/ifroute-{}'.format( ++ device_name ++ ) + util.write_file(route_file, config_routes) + except Exception: + # the parser above epxects another level of nesting ++++++ datasourceLocalDisk.patch ++++++ --- /var/tmp/diff_new_pack.pCrCuN/_old 2023-07-18 21:53:48.414189189 +0200 +++ /var/tmp/diff_new_pack.pCrCuN/_new 2023-07-18 21:53:48.446189365 +0200 @@ -1,6 +1,6 @@ ---- cloudinit/sources/DataSourceLocalDisk.py 2016/11/25 19:01:00 1.1 -+++ cloudinit/sources/DataSourceLocalDisk.py 2016/11/26 20:42:02 -@@ -0,0 +1,101 @@ +--- /dev/null ++++ cloudinit/sources/DataSourceLocalDisk.py +@@ -0,0 +1,107 @@ +# vi: ts=4 expandtab +# +# Copyright (C) 2016 SUSE Linux GmbH @@ -51,17 +51,22 @@ + + # Check to see if the seed dir has data. 
 +        try:
-+            seeded = util.pathprefix2dict(self.seed_dir, ['user-data','meta-data'],['vendor-data'])
++            seeded = util.pathprefix2dict(
++                self.seed_dir, ['user-data', 'meta-data'], ['vendor-data']
++            )
 +            found.append(self.seed_dir)
 +            mydata = _merge_new_seed(mydata, seeded)
-+        except ValueError as e:
++        except ValueError:
 +            pass
 +
 +        try:
-+            seeded = util.pathprefix2dict('/cloud-init-config', ['user-data','meta-data'],['vendor-data'])
++            seeded = util.pathprefix2dict(
++                '/cloud-init-config', ['user-data', 'meta-data'],
++                ['vendor-data']
++            )
 +            found.append('/cloud-init-config')
 +            mydata = _merge_new_seed(mydata, seeded)
-+        except ValueError as e:
++        except ValueError:
 +            return False
 +
 +        # Merge in the defaults
@@ -78,6 +83,7 @@
 +        # quickly (local check only) if self.instance_id is still valid
 +        return sources.instance_id_matches_system_uuid(self.get_instance_id())
 +
++
 +def _merge_new_seed(cur, seeded):
 +    ret = cur.copy()
 +

++++++ hidesensitivedata ++++++
#!/usr/bin/python3

import json
import os
import sys

from pathlib import Path

from cloudinit.atomic_helper import write_json
from cloudinit.sources import (
    DataSource,
    process_instance_metadata,
    redact_sensitive_keys,
)
from cloudinit.stages import Init

init = Init()
log_file = init.cfg["def_log_file"]
if os.path.exists(log_file):
    os.chmod(log_file, 0o640)

rundir = init.paths.run_dir
instance_data_path = Path(rundir, "instance-data.json")

if not os.path.exists(str(instance_data_path)):
    sys.exit(0)

instance_json = json.load(instance_data_path.open(encoding="utf-8"))

sensitive_keys = DataSource.sensitive_metadata_keys
processed_json = process_instance_metadata(
    instance_json, sensitive_keys=sensitive_keys
)
redacted_json = redact_sensitive_keys(processed_json)

write_json(str(instance_data_path), redacted_json)
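
Editor's note: the hidesensitivedata helper above relies on the hardened redact_sensitive_keys() from the CVE-2023-1786 patch, which now skips key paths that no longer resolve instead of raising KeyError. The following is a minimal, hypothetical sketch of that behaviour; the toy metadata dict and its key paths are invented for illustration, and only the import mirrors the script above.

#!/usr/bin/python3
# Illustration only: "sensitive_keys" holds '/'-separated key paths;
# matching values are replaced with the redaction placeholder string,
# and paths that do not resolve are skipped rather than raising KeyError.
from cloudinit.sources import redact_sensitive_keys

metadata = {
    "sensitive_keys": ["ds/user-data", "ds/vendor-data"],
    "ds": {"user-data": "s3cr3t", "meta-data": {"instance-id": "i-0000"}},
}

redacted = redact_sensitive_keys(metadata)
# "ds/user-data" now holds the redaction placeholder; "ds/vendor-data"
# is absent from the dict, so it is silently ignored.
print(redacted["ds"]["user-data"])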
