prometheanfire 14/08/21 20:40:15 Added: 2014.1.2-CVE-2014-5356.patch Removed: CVE-2014-0162-2013.2.3.patch Log: bump and fix for bug 520352 CVE-2014-5356 (Portage version: 2.2.8-r1/cvs/Linux x86_64, signed Manifest commit with key 0x2471eb3e40ac5ac3)
Revision Changes Path 1.1 app-admin/glance/files/2014.1.2-CVE-2014-5356.patch file : http://sources.gentoo.org/viewvc.cgi/gentoo-x86/app-admin/glance/files/2014.1.2-CVE-2014-5356.patch?rev=1.1&view=markup plain: http://sources.gentoo.org/viewvc.cgi/gentoo-x86/app-admin/glance/files/2014.1.2-CVE-2014-5356.patch?rev=1.1&content-type=text/plain Index: 2014.1.2-CVE-2014-5356.patch =================================================================== >From 31a4d1852a0c27bac5757c192f300f051229a312 Mon Sep 17 00:00:00 2001 From: Tom Leaman <[email protected]> Date: Fri, 2 May 2014 10:09:20 +0000 Subject: Enforce image_size_cap on v2 upload image_size_cap should be checked and enforced on upload Enforcement is in two places: - on image metadata save - during image save to backend store (cherry picked from commit 92ab00fca6926eaf3f7f92a955a5e07140063718) Conflicts: glance/location.py glance/tests/functional/v2/test_images.py Closes-Bug: 1315321 Change-Id: I45bfb360703617bc394e9e27fe17adf43b09c0e1 Co-Author: Manuel Desbonnet <[email protected]> diff --git a/glance/db/__init__.py b/glance/db/__init__.py index a6e804c..a59447d 100644 --- a/glance/db/__init__.py +++ b/glance/db/__init__.py @@ -27,6 +27,7 @@ from glance.openstack.common import importutils CONF = cfg.CONF +CONF.import_opt('image_size_cap', 'glance.common.config') CONF.import_opt('metadata_encryption_key', 'glance.common.config') @@ -150,6 +151,8 @@ class ImageRepo(object): def add(self, image): image_values = self._format_image_to_db(image) + if image_values['size'] > CONF.image_size_cap: + raise exception.ImageSizeLimitExceeded # the updated_at value is not set in the _format_image_to_db # function since it is specific to image create image_values['updated_at'] = image.updated_at @@ -161,6 +164,8 @@ class ImageRepo(object): def save(self, image): image_values = self._format_image_to_db(image) + if image_values['size'] > CONF.image_size_cap: + raise exception.ImageSizeLimitExceeded try: new_values = 
self.db_api.image_update(self.context, image.image_id, diff --git a/glance/store/__init__.py b/glance/store/__init__.py index 33a67d6..273b7c7 100644 --- a/glance/store/__init__.py +++ b/glance/store/__init__.py @@ -721,7 +721,10 @@ class ImageProxy(glance.domain.proxy.Image): size = 0 # NOTE(markwash): zero -> unknown size location, size, checksum, loc_meta = self.store_api.add_to_backend( self.context, CONF.default_store, - self.image.image_id, utils.CooperativeReader(data), size) + self.image.image_id, + utils.LimitingReader(utils.CooperativeReader(data), + CONF.image_size_cap), + size) self.image.locations = [{'url': location, 'metadata': loc_meta}] self.image.size = size self.image.checksum = checksum diff --git a/glance/tests/functional/__init__.py b/glance/tests/functional/__init__.py index 537a42f..2f116f0 100644 --- a/glance/tests/functional/__init__.py +++ b/glance/tests/functional/__init__.py @@ -280,6 +280,7 @@ class ApiServer(Server): self.pid_file = pid_file or os.path.join(self.test_dir, "api.pid") self.scrubber_datadir = os.path.join(self.test_dir, "scrubber") self.log_file = os.path.join(self.test_dir, "api.log") + self.image_size_cap = 1099511627776 self.s3_store_host = "s3.amazonaws.com" self.s3_store_access_key = "" self.s3_store_secret_key = "" @@ -341,6 +342,7 @@ metadata_encryption_key = %(metadata_encryption_key)s registry_host = 127.0.0.1 registry_port = %(registry_port)s log_file = %(log_file)s +image_size_cap = %(image_size_cap)d s3_store_host = %(s3_store_host)s s3_store_access_key = %(s3_store_access_key)s s3_store_secret_key = %(s3_store_secret_key)s diff --git a/glance/tests/functional/v2/test_images.py b/glance/tests/functional/v2/test_images.py index a309e64..4247434 100644 --- a/glance/tests/functional/v2/test_images.py +++ b/glance/tests/functional/v2/test_images.py @@ -451,6 +451,48 @@ class TestImages(functional.FunctionalTest): self.stop_servers() + def test_image_size_cap(self): + self.api_server.image_size_cap = 128 + 
self.start_servers(**self.__dict__.copy()) + # create an image + path = self._url('/v2/images') + headers = self._headers({'content-type': 'application/json'}) + data = jsonutils.dumps({'name': 'image-size-cap-test-image', + 'type': 'kernel', 'disk_format': 'aki', + 'container_format': 'aki'}) + response = requests.post(path, headers=headers, data=data) + self.assertEqual(201, response.status_code) + + image = jsonutils.loads(response.text) + image_id = image['id'] + + #try to populate it with oversized data + path = self._url('/v2/images/%s/file' % image_id) + headers = self._headers({'Content-Type': 'application/octet-stream'}) + + class StreamSim(object): + # Using a one-shot iterator to force chunked transfer in the PUT + # request + def __init__(self, size): + self.size = size + + def __iter__(self): + yield 'Z' * self.size + + response = requests.put(path, headers=headers, data=StreamSim( + self.api_server.image_size_cap + 1)) + self.assertEqual(413, response.status_code) + + # hashlib.md5('Z'*129).hexdigest() + # == '76522d28cb4418f12704dfa7acd6e7ee' + # If the image has this checksum, it means that the whole stream was + # accepted and written to the store, which should not be the case. 
+ path = self._url('/v2/images/{0}'.format(image_id)) + headers = self._headers({'content-type': 'application/json'}) + response = requests.get(path, headers=headers) + image_checksum = jsonutils.loads(response.text).get('checksum') + self.assertNotEqual(image_checksum, '76522d28cb4418f12704dfa7acd6e7ee') + def test_permissions(self): # Create an image that belongs to TENANT1 path = self._url('/v2/images') diff --git a/glance/tests/unit/test_store_image.py b/glance/tests/unit/test_store_image.py index eb8d333..424915b 100644 --- a/glance/tests/unit/test_store_image.py +++ b/glance/tests/unit/test_store_image.py @@ -119,8 +119,10 @@ class TestStoreImage(utils.BaseTestCase): self.stubs.Set(unit_test_utils.FakeStoreAPI, 'get_from_backend', fake_get_from_backend) - - self.assertEqual(image1.get_data().fd, 'ZZZ') + # This time, image1.get_data() returns the data wrapped in a + # LimitingReader|CooperativeReader pipeline, so peeking under + # the hood of those objects to get at the underlying string. + self.assertEqual(image1.get_data().data.fd, 'ZZZ') image1.locations.pop(0) self.assertEqual(len(image1.locations), 1) image2.delete() diff --git a/glance/tests/unit/utils.py b/glance/tests/unit/utils.py index a43dea3..4186787 100644 --- a/glance/tests/unit/utils.py +++ b/glance/tests/unit/utils.py @@ -148,7 +148,10 @@ class FakeStoreAPI(object): if image_id in location: raise exception.Duplicate() if not size: - size = len(data.fd) + # 'data' is a string wrapped in a LimitingReader|CooperativeReader + # pipeline, so peek under the hood of those objects to get at the + # string itself. + size = len(data.data.fd) if (current_store_size + size) > store_max_size: raise exception.StorageFull() if context.user == USER2: -- cgit v0.10.1
