I have attached the modelchanges diff of jmatthews's branch vs. master.
Note:
* PackageVersionApi.create now takes a name, as a step toward
dropping the "Package" class
- def create(self, packageid, epoch, version, release, arch):
+ def create(self, name, epoch, version, release, arch, description,
+ checksum_type, checksum, filename):
What I would do is take it further: merge the PackageVersion and Package
classes into one object in our API and just call it Package.
* Added sha256 checksum calculations to our data model in repo_sync.import_package
Mike
--
Mike McCune
mmccune AT redhat.com
Red Hat Engineering | Portland, OR
Systems Management | 650.254.4248
diff --git a/playpen/mongodb/display_pkgs.py b/playpen/mongodb/display_pkgs.py
new file mode 100755
index 0000000..9e56c71
--- /dev/null
+++ b/playpen/mongodb/display_pkgs.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+import time
+from pymongo import Connection
+from pymongo.son_manipulator import AutoReference, NamespaceInjector
+from optparse import OptionParser
+
+import pulp.util
+from pulp.api.package_version import PackageVersionApi
+
+if __name__ == "__main__":
+
+ package_id = "pulp-test-package"
+ checksum = "6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f"
+ checksum_type = "sha256"
+ filename = "pulp-test-package-0.3.1-1.fc11.x86_64.rpm"
+
+
+ config = pulp.util.loadConfig("../../etc/pulp.ini")
+ pvApi = PackageVersionApi(config)
+
+ found = pvApi.packageversion(filename=filename, checksum_type=checksum_type, checksum=checksum)
+ print "Lookup for %s, %s, %s yielded %s" % (filename, checksum_type, checksum, found)
+
+ db = pvApi.objectdb
+ print "db = %s" % (db)
+ found = db.find({"filename":filename})
+ print "Search for all PV's with %s: %s" % (filename, found)
+ for f in found:
+ print f
+ found = db.find()
+ print "%s PV objects found with an open search" % (found.count())
+
+ found = pvApi.packageversion()
+ print "search with empty searchDict returned %s results" % (found.count())
+
diff --git a/src/pulp/api/base.py b/src/pulp/api/base.py
index 1b9a123..7ae0fb9 100644
--- a/src/pulp/api/base.py
+++ b/src/pulp/api/base.py
@@ -26,7 +26,8 @@ class BaseApi(object):
# Mongo DB
self.connection = pymongo.Connection()
- self.db = self.connection._database
+ #self.db = self.connection._database
+ self.db = self.connection._database_model_changes
# Inject the collection's namespace into each object
self.db.add_son_manipulator(NamespaceInjector())
# Provides auto-referencing/auto-dereferencing ability
diff --git a/src/pulp/api/package_version.py b/src/pulp/api/package_version.py
index be076b3..ab0c364 100644
--- a/src/pulp/api/package_version.py
+++ b/src/pulp/api/package_version.py
@@ -23,29 +23,62 @@ class PackageVersionApi(BaseApi):
def __init__(self, config):
BaseApi.__init__(self, config)
+ self.objectdb.ensure_index([('name', pymongo.DESCENDING),
+ ('epoch', pymongo.DESCENDING),
+ ('version', pymongo.DESCENDING),
+ ('release', pymongo.DESCENDING),
+ ('arch', pymongo.DESCENDING),
+ ('filename', pymongo.DESCENDING),
+ ('checksum', pymongo.DESCENDING)],
+ unique=True, background=True)
def _get_unique_indexes(self):
return []
def _get_indexes(self):
- return ["packageid"]
+ return ["name", "filename", "checksum", "epoch", "version", "release",
+ "arch", "description"]
def _getcollection(self):
return self.db.packageversions
- def create(self, packageid, epoch, version, release, arch):
+ def create(self, name, epoch, version, release, arch, description,
+ checksum_type, checksum, filename):
"""
Create a new PackageVersion object and return it
"""
- pv = model.PackageVersion(packageid, epoch, version, release, arch)
+ pv = model.PackageVersion(name, epoch, version, release, arch, description,
+ checksum_type, checksum, filename)
self.objectdb.insert(pv)
return pv
-
- def packageversion(self, id, filter=None):
+
+ def delete(self, object):
+ """
+ Delete package version object based on "_id" key
+ """
+ self.objectdb.remove({"_id":object["_id"]})
+
+ def packageversion(self, name=None, epoch=None, version=None, release=None, arch=None,
+ filename=None, checksum_type=None, checksum=None):
"""
- Return a single PackageVersion object
+ Return a list of all package version objects matching search terms
"""
- return self.objectdb.find_one({'id': id})
+ searchDict = {}
+ if name:
+ searchDict['name'] = name
+ if epoch:
+ searchDict['epoch'] = epoch
+ if version:
+ searchDict['version'] = version
+ if release:
+ searchDict['release'] = release
+ if arch:
+ searchDict['arch'] = arch
+ if filename:
+ searchDict['filename'] = filename
+ if checksum_type and checksum:
+ searchDict['checksum.%s' % checksum_type] = checksum
+ return self.objectdb.find(searchDict)
def packageversions(self):
"""
diff --git a/src/pulp/api/repo.py b/src/pulp/api/repo.py
index 3ee8858..f2e1015 100644
--- a/src/pulp/api/repo.py
+++ b/src/pulp/api/repo.py
@@ -29,7 +29,7 @@ from grinder.RepoFetch import YumRepoGrinder
from pulp import model
from pulp import repo_sync, upload
from pulp.api.base import BaseApi
-from pulp.api.package import PackageApi
+#from pulp.api.package import PackageApi
from pulp.api.package_version import PackageVersionApi
from pulp.api.package_group import PackageGroupApi
from pulp.api.package_group_category import PackageGroupCategoryApi
@@ -46,13 +46,19 @@ class RepoApi(BaseApi):
def __init__(self, config):
BaseApi.__init__(self, config)
- self.packageApi = PackageApi(config)
+ #self.packageApi = PackageApi(config)
self.packageVersionApi = PackageVersionApi(config)
self.packageGroupApi = PackageGroupApi(config)
self.packageGroupCategoryApi = PackageGroupCategoryApi(config)
# TODO: Extract this to a config
self.localStoragePath = config.get('paths', 'local_storage')
+
+ def _get_indexes(self):
+ return ["packages", "packagegroups", "packagegroupcategories"]
+
+ def _get_unique_indexes(self):
+ return ["id"]
def _getcollection(self):
return self.db.repos
@@ -71,11 +77,128 @@ class RepoApi(BaseApi):
def packages(self, id):
"""
- Return list of Package objects in this Repo
+ Return dictionary of PackageVersion objects in this Repo, key is package name
"""
repo = self.repository(id)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % id)
return repo['packages']
+ def packageversions(self, repoid, name):
+ """
+ Return list of PackageVersions objects for this repo and package name
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ if not repo["packages"].has_key(name):
+ return None
+ return repo["packages"][name]
+
+ def add_package_version(self, repoid, pv):
+ """
+ Adds the passed in package version to this repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ if not repo["packages"].has_key(pv['name']):
+ repo["packages"][pv['name']] = []
+ # TODO: We might want to restrict PackageVersions we add to only
+ # allow 1 NEVRA per repo and require filename to be unique
+ for item in repo["packages"][pv['name']]:
+ if item['_id'] == pv['_id']:
+ # No need to update repo, this PackageVersion is already under this repo
+ return
+ # Note: A DBRef() for the objects '_id' is what's added in mongo
+ # This is a reference to the PackageVersion collection's object
+ repo["packages"][pv['name']].append(pv)
+ self.update(repo)
+
+ def remove_package_version(self, repoid, pv):
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ if not repo["packages"].has_key(pv['name']):
+ raise PulpException("No Package with name: %s found in repo: %s" %
+ (pv['name'], repoid))
+ for item in repo["packages"][pv['name']]:
+ if item['name'] == pv['name'] and \
+ item['version'] == pv['version'] and \
+ item['epoch'] == pv['epoch'] and \
+ item['release'] == pv['release'] and \
+ item['arch'] == pv['arch']:
+ repo['packages'][pv['name']].remove(item)
+ if len(repo['packages'][pv['name']]) == 0:
+ # list is empty now, so cleanup and remove
+ # it from the packages
+ del repo['packages'][pv['name']]
+ self.update(repo)
+
+ def remove_packagegroup(self, repoid, groupid):
+ """
+ Remove a packagegroup from a repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ if repo['packagegroups'].has_key(groupid):
+ del repo['packagegroups'][groupid]
+ self.update(repo)
+
+ def update_packagegroup(self, repoid, pg):
+ """
+ Save the passed in PackageGroup to this repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ repo['packagegroups'][pg['id']] = pg
+ self.update(repo)
+
+ def update_packagegroups(self, repoid, pglist):
+ """
+ Save the list of passed in PackageGroup objects to this repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ for item in pglist:
+ repo['packagegroups'][item['id']] = item
+ self.update(repo)
+
+ def translate_packagegroup(self, obj):
+ """
+ Translate a SON Document to an object that yum.comps.Comps can work with
+ """
+ # Main reason for doing this is that yum.comps expects the passed in
+ # object to support dot notation references, the returned SON document
+ # does not support this, so yum.comps isn't able to read the info
+ #TODO: More work is needed in this method before output of groups will work
+ pg = model.PackageGroup(obj['id'], obj['name'], obj['description'],
+ user_visible=obj['user_visible'], display_order=obj['display_order'],
+ default=obj['default'], langonly=obj['langonly'])
+ pg.groupid = obj['id']
+ pg.translated_name = {}
+ for key in obj['translated_name']:
+ pg.translated_name[key] = obj['translated_name'][key]
+ pg.translated_description = {}
+ for key in obj['translated_description']:
+ pg.translated_description[key] = obj['translated_description']
+ pg.mandatory_packages = {}
+ for pkgname in obj['mandatory_package_names']:
+ pg.mandatory_packages[pkgname] = 1
+ pg.optional_packages = {}
+ for pkgname in obj['optional_package_names']:
+ pg.optional_packages[pkgname] = 1
+ pg.default_packages = {}
+ for pkgname in obj['default_package_names']:
+ pg.default_packages[pkgname] = 1
+ pg.conditional_packages = {}
+ for key in obj['conditional_package_names']:
+ pg.conditional_packages[key] = obj['conditional_package_names'][key]
+ return pg
+
def packagegroups(self, id):
"""
Return list of PackageGroup objects in this Repo
@@ -83,13 +206,98 @@ class RepoApi(BaseApi):
repo = self.repository(id)
return repo['packagegroups']
+ def packagegroup(self, repoid, groupid):
+ """
+ Return a PackageGroup from this Repo
+ """
+ repo = self.repository(repoid)
+ if not repo['packagegroups'].has_key(groupid):
+ return None
+ return repo['packagegroups'][groupid]
+
+ def remove_packagegroupcategory(self, repoid, categoryid):
+ """
+ Remove a packagegroupcategory from a repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ if repo['packagegroupcategories'].has_key(categoryid):
+ del repo['packagegroupcategories'][categoryid]
+ self.update(repo)
+
+ def update_packagegroupcategory(self, repoid, pgc):
+ """
+ Save the passed in PackageGroupCategory to this repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ repo['packagegroupcategories'][pgc['id']] = pgc
+ self.update(repo)
+
+ def update_packagegroupcategories(self, repoid, pgclist):
+ """
+ Save the list of passed in PackageGroupCategory objects to this repo
+ """
+ repo = self.repository(repoid)
+ if (repo == None):
+ raise PulpException("No Repo with id: %s found" % repoid)
+ for item in pgclist:
+ repo['packagegroupcategories'][item['id']] = item
+ self.update(repo)
+
+ def translate_packagegroupcategory(self, obj):
+ """
+ Translate a SON Document to an object that yum.comps.Comps can work with
+ """
+ #TODO: More work is needed in this method before output of categories will work
+ pgc = model.PackageGroupCategory(obj['id'], obj['name'], obj['description'],
+ display_order=obj['display_order'])
+ pgc.categoryid = obj['id']
+ pgc.translated_name = {}
+ for key in obj['translated_name']:
+ pgc.translated_name[key] = obj['translated_name'][key]
+ pgc.translated_description = {}
+ for key in obj['translated_description']:
+ pgc.translated_description[key] = obj['translated_description'][key]
+ pgc._groups = {}
+ for groupid in obj['packagegroupids']:
+ pgc._groups[groupid] = groupid
+ return pgc
+
+ def packagegroups(self, id):
+ """
+ Return list of PackageGroup objects in this Repo
+ """
+ repo = self.repository(id)
+ return repo['packagegroups']
+
+ def packagegroup(self, repoid, groupid):
+ """
+ Return a PackageGroup from this Repo
+ """
+ repo = self.repository(repoid)
+ if not repo['packagegroups'].has_key(groupid):
+ return None
+ return repo['packagegroups'][groupid]
+
def packagegroupcategories(self, id):
"""
Return list of PackageGroupCategory objects in this Repo
"""
repo = self.repository(id)
return repo['packagegroupcategories']
-
+
+ def packagegroupcategory(self, repoid, categoryid):
+ """
+ Return a PackageGroupCategory object from this Repo
+ """
+ repo = self.repository(repoid)
+ if not repo['packagegroupcategories'].has_key(categoryid):
+ return None
+ return repo['packagegroupcategories'][categoryid]
+
def create(self, id, name, arch, feed):
"""
Create a new Repository object and return it
diff --git a/src/pulp/model.py b/src/pulp/model.py
index 9384727..ce4d4a3 100644
--- a/src/pulp/model.py
+++ b/src/pulp/model.py
@@ -64,30 +64,35 @@ class RepoSource(Base):
class Package(Base):
- def __init__(self, packageid, description):
- #TODO: move 'description' to PackageVersion
+ def __init__(self, repoid, packageid):
#TODO: Consider getting rid of 'package', we might not need it
+ self.repoid = repoid
self.packageid = packageid
- self.description = description
self.versions = []
class PackageVersion(Base):
- def __init__(self, packageid, epoch, version, release, arch):
- self.packageid = packageid
+ def __init__(self, name, epoch, version, release, arch, description,
+ checksum_type, checksum, filename):
+ #TODO: Can we enforce unique indexes between keys?
+ self.name = name
self.epoch = epoch
self.version = version
self.release = release
self.arch = arch
- #TODO: add support for 'filename' and 'checksum' to constructor, apis, and tests
- #self.filename = ""
- #self.checksum = {}
+ self.description = description
+ self.filename = filename
+ self.checksum = {checksum_type:checksum}
+ # Add gpg keys
self.requires = []
self.provides = []
class PackageGroup(Base):
- def __init__(self, groupid, name, description, user_visible=False,
+ """
+ Class represents a yum.comps.Group
+ """
+ def __init__(self, id, name, description, user_visible=False,
display_order=1024, default=True, langonly=None):
- self.groupid = groupid
+ self.id = id
self.name = name
self.description = description
self.user_visible = user_visible
@@ -102,8 +107,8 @@ class PackageGroup(Base):
self.translated_description = {}
class PackageGroupCategory(Base):
- def __init__(self, categoryid, name, description, display_order=99):
- self.categoryid = categoryid
+ def __init__(self, id, name, description, display_order=99):
+ self.id = id
self.name = name
self.description = description
self.display_order = display_order
diff --git a/src/pulp/repo_sync.py b/src/pulp/repo_sync.py
index f5264ea..1fa9cf1 100644
--- a/src/pulp/repo_sync.py
+++ b/src/pulp/repo_sync.py
@@ -17,6 +17,7 @@
import gzip
import logging
import os
+import time
import traceback
from urlparse import urlparse
@@ -60,94 +61,87 @@ class BaseSynchronizer(object):
self.package_group_api = PackageGroupApi(config)
def add_packages_from_dir(self, dir, repo):
-
dir_list = os.listdir(dir)
package_count = 0
+ startTime = time.time()
for fname in dir_list:
self.import_package(dir + fname, repo)
package_count = package_count + 1
- log.debug("read [%s] packages" % package_count)
- self._read_comps_xml(dir, repo)
+ endTime = time.time()
+ log.debug("Repo: %s read [%s] packages took %s seconds" %
+ (repo['id'], package_count, endTime - startTime))
+ # TODO: Parse repomd.xml and lookup name for groups element
+ compsfile = None
+ compspath = os.path.join(dir, 'repodata/comps.xml')
+ if os.path.isfile(compspath):
+ compsfile = open(compspath, "r")
+ else:
+ compspath = os.path.join(dir, 'repodata/comps.xml.gz')
+ if os.path.isfile(compspath):
+ compsfile = gzip.open(compspath, 'r')
+ if compsfile:
+ repo['comps_xml_path'] = compspath
+ self.import_groups_data(compsfile, repo)
+ log.debug("Loaded comps info from %s" % (compspath))
def import_package(self, pkg_path, repo):
- packages = repo['packages']
if (pkg_path.endswith(".rpm")):
try:
+ file_name = os.path.basename(pkg_path)
info = pulp.util.get_rpm_information(pkg_path)
- p = self.package_api.package(info['name'])
- if not p:
- p = self.package_api.create(info['name'], info['description'])
-
- pv = self.package_version_api.packageversion_by_ivera(p['packageid'],
- info['version'],
- info['epoch'],
- info['release'],
- info['arch'],)
- if not pv:
- pv = self.package_version_api.create(p["packageid"], info['epoch'],
- info['version'], info['release'], info['arch'])
+ if not repo["packages"].has_key(info['name']):
+ repo["packages"][info['name']] = []
+ hashtype = "sha256"
+ checksum = pulp.util.getFileChecksum(hashtype=hashtype,
+ filename=pkg_path)
+ found = self.package_version_api.packageversion(name=info['name'],
+ epoch=info['epoch'], version=info['version'],
+ release=info['release'], arch=info['arch'],filename=file_name,
+ checksum_type=hashtype, checksum=checksum)
+ if found.count() == 1:
+ pv = found[0]
+ else:
+ pv = self.package_version_api.create(info['name'], info['epoch'],
+ info['version'], info['release'], info['arch'], info['description'],
+ "sha256", checksum, file_name)
for dep in info['requires']:
pv.requires.append(dep)
for dep in info['provides']:
pv.provides.append(dep)
self.package_version_api.update(pv)
-
- p["versions"].append(pv)
- self.package_api.update(p)
- packages[p["packageid"]] = p
- log.debug("Repo <%s> added package <%s> with %s versions" %
- (repo["id"], p["packageid"], len(p["versions"])))
+ #TODO: Ensure we don't add duplicate pv's to the 'packages' list
+ repo['packages'][info['name']].append(pv)
except Exception, e:
- log.debug("Exception = %s" % (traceback.format_exc()))
+ log.debug("%s" % (traceback.format_exc()))
log.error("error reading package %s" % (pkg_path))
- def _read_comps_xml(self, dir, repo):
+ def import_groups_data(self, compsfile, repo):
"""
Reads a comps.xml or comps.xml.gz under repodata from dir
Loads PackageGroup and Category info our db
"""
-
- compspath = os.path.join(dir, 'repodata/comps.xml')
- compsxml = None
- if os.path.isfile(compspath):
- compsxml = open(compspath, "r")
- else:
- compspath = os.path.join(dir, 'repodata/comps.xml.gz')
- if os.path.isfile(compspath):
- compsxml = gzip.open(compspath, 'r')
-
- if not compsxml:
- log.info("Not able to find a comps.xml(.gz) to read")
- return False
-
- log.info("Reading comps info from %s" % (compspath))
- repo['comps_xml_path'] = compspath
try:
comps = yum.comps.Comps()
- comps.add(compsxml)
+ comps.add(compsfile)
for c in comps.categories:
- ctg = self.package_group_category_api.create(c.categoryid, c.name,
- c.description, c.display_order)
+ ctg = model.PackageGroupCategory(c.categoryid, c.name,
+ c.description, c.display_order)
groupids = [grp for grp in c.groups]
- ctg.packagegroupids.extend(groupids)
- ctg.translated_name = c.translated_name
- ctg.translated_description = c.translated_description
- self.package_group_category_api.update(ctg)
- repo['packagegroupcategories'][ctg.categoryid] = ctg
-
+ ctg['packagegroupids'].extend(groupids)
+ ctg['translated_name'] = c.translated_name
+ ctg['translated_description'] = c.translated_description
+ repo['packagegroupcategories'][ctg['id']] = ctg
for g in comps.groups:
- grp = self.package_group_api.create(g.groupid, g.name, g.description,
- g.user_visible, g.display_order, g.default, g.langonly)
+ grp = model.PackageGroup(g.groupid, g.name, g.description,
+ g.user_visible, g.display_order, g.default, g.langonly)
grp.mandatory_package_names.extend(g.mandatory_packages.keys())
grp.optional_package_names.extend(g.optional_packages.keys())
grp.default_package_names.extend(g.default_packages.keys())
grp.conditional_package_names = g.conditional_packages
grp.translated_name = g.translated_name
grp.translated_description = g.translated_description
- self.package_group_api.update(grp)
- repo['packagegroups'][grp.groupid] = grp
- log.info("Comps info added from %s" % (compspath))
- except yum.comps.CompsException:
+ repo['packagegroups'][grp['id']] = grp
+ except yum.Errors.CompsException:
log.error("Unable to parse comps info for %s" % (compspath))
return False
return True
diff --git a/test/common/large_load.py b/test/common/large_load.py
index 413cd0d..8098380 100644
--- a/test/common/large_load.py
+++ b/test/common/large_load.py
@@ -87,9 +87,9 @@ class LargeLoad(unittest.TestCase):
# self.capi.update(c)
if (i % 100 == 0):
print "created [%s] consumers" % i
- p = Package(TEST_PACKAGE_ID, 'random package to be found')
+ p = Package(repo["id"], TEST_PACKAGE_ID, 'random package to be found')
c.packageids.append(p.id)
- # self.capi.update(c)
+ #self.capi.update(c)
last_desc = c.description
last_id = c.id
consumers.append(c)
@@ -117,6 +117,11 @@ class LargeLoad(unittest.TestCase):
cwithp = ll.capi.consumerswithpackage(TEST_PACKAGE_ID)
print "Found [%s] consumers with packageid: [%s]" % (len(cwithp), TEST_PACKAGE_ID)
+
+print "This has not been updated with the branch modelchanges yet"
+print "More work is needed on the consumer side"
+sys.exit(1)
+
parser = optparse.OptionParser()
parser.add_option('--dirlist', dest='dirlist',
action='store', help='File containing list of directories containing the repos you wish to use for this test')
diff --git a/test/unit/test_api.py b/test/unit/test_api.py
index eaa16d6..7eae1eb 100644
--- a/test/unit/test_api.py
+++ b/test/unit/test_api.py
@@ -53,7 +53,7 @@ from testutil import load_test_config
class TestApi(unittest.TestCase):
def clean(self):
self.rapi.clean()
- self.papi.clean()
+ #self.papi.clean()
self.capi.clean()
self.pvapi.clean()
self.pgapi.clean()
@@ -63,7 +63,8 @@ class TestApi(unittest.TestCase):
config = load_test_config()
self.rapi = RepoApi(config)
- self.papi = PackageApi(config)
+ self.rapi.localStoragePath = "/tmp"
+ #self.papi = PackageApi(config)
self.capi = ConsumerApi(config)
self.pvapi = PackageVersionApi(config)
self.pgapi = PackageGroupApi(config)
@@ -154,8 +155,10 @@ class TestApi(unittest.TestCase):
def test_repo_packages(self):
repo = self.rapi.create('some-id','some name', \
'i386', 'yum:http://example.com')
- package = Package('test_repo_packages','test package')
- repo['packages'][package["packageid"]] = package
+ pv = self.create_package_version('test_repo_packages')
+ # package = PackageVersion('test_repo_packages','test package')
+ self.rapi.add_package_version(repo["id"], pv)
+ # repo['packages'][package["packageid"]] = package
self.rapi.update(repo)
found = self.rapi.repository('some-id')
@@ -170,42 +173,33 @@ class TestApi(unittest.TestCase):
'test-group-description')
package = Package('test_repo_packages','test package')
pkggroup.default_package_names.append(package["packageid"])
- repo['packagegroups'][pkggroup["groupid"]] = pkggroup
+ repo['packagegroups'][pkggroup["id"]] = pkggroup
repo['packages'][package["packageid"]] = package
+
self.rapi.update(repo)
found = self.rapi.repository('some-id')
- packages = found['packages']
- assert(packages != None)
- assert(packages['test_repo_packages'] != None)
assert(found['packagegroups'] != None)
- print "test_repo_package_groups found['packagegroups'] = %s" % (found['packagegroups'])
- assert(pkggroup.groupid in found['packagegroups'])
+ assert(pkggroup['id'] in found['packagegroups'])
def test_repo_package_group_categories(self):
- repo = self.rapi.create('some-id','some name', \
+ repo = self.rapi.create('some-id_pkg_group_categories','some name', \
'i386', 'yum:http://example.com')
- package = Package('test_repo_packages','test package')
pkggroup = PackageGroup('test-group-id', 'test-group-name',
'test-group-description')
- pkggroup.default_package_names.append(package["packageid"])
+ pkggroup.default_package_names.append("test-package-name")
ctg = PackageGroupCategory('test-group-cat-id', 'test-group-cat-name',
'test-group-cat-description')
ctg.packagegroupids = pkggroup.id
- repo['packagegroupcategories'][ctg.categoryid] = ctg
- repo['packagegroups'][pkggroup.groupid] = pkggroup
- repo['packages'][package["packageid"]] = package
+ repo['packagegroupcategories'][ctg.id] = ctg
+ repo['packagegroups'][pkggroup.id] = pkggroup
self.rapi.update(repo)
- found = self.rapi.repository('some-id')
- packages = found['packages']
- assert(packages != None)
- assert(packages['test_repo_packages'] != None)
+ found = self.rapi.repository('some-id_pkg_group_categories')
assert(found['packagegroups'] != None)
- print "test_repo_package_groups found['packagegroups'] = %s" % (found['packagegroups'])
- assert(pkggroup.groupid in found['packagegroups'])
+ assert(pkggroup['id'] in found['packagegroups'])
assert(found['packagegroupcategories'] != None)
- assert(ctg.categoryid in found['packagegroupcategories'])
+ assert(ctg['id'] in found['packagegroupcategories'])
def test_consumer_create(self):
c = self.capi.create('test-consumer', 'some consumer desc')
@@ -235,8 +229,11 @@ class TestApi(unittest.TestCase):
def test_consumerwithpackage(self):
c = self.capi.create('test-consumer', 'some consumer desc')
- package = Package('test_consumerwithpackage','test package search')
- c.packageids.append(package["packageid"])
+ repo = self.rapi.create('some-id', 'some name',
+ 'i386', 'yum:http://example.com')
+ test_pkg_name = "test_consumerwithpackage"
+ #TODO: The consumer model/api needs to be updated, it's not setup to handle
+ # tracking a packageversion
for i in range(10):
package = Package(random_string(), random_string())
c.packageids.append(package["packageid"])
@@ -257,7 +254,7 @@ class TestApi(unittest.TestCase):
assert(parsed != None)
print parsed
- def test_sync_two_repos_share_common_package(self):
+ def test_sync_two_repos_same_nevra_different_checksum(self):
"""
Sync 2 repos that have a package with same NEVRA
but different checksum
@@ -273,10 +270,9 @@ class TestApi(unittest.TestCase):
repo_a = self.rapi.create(repo_name_a,'some name', 'x86_64',
'local:file://%s' % datadir_a)
repo_b = self.rapi.create(repo_name_b,'some name', 'x86_64',
- 'local:file://%s' % datadir_b)
- self.rapi.sync(repo_a.id)
- self.rapi.sync(repo_b.id)
-
+ 'local:file://%s' % datadir_b)
+ self.rapi.sync(repo_a["id"])
+ self.rapi.sync(repo_b["id"])
# Look up each repo from API
found_a = self.rapi.repository(repo_a.id)
found_b = self.rapi.repository(repo_b.id)
@@ -286,15 +282,14 @@ class TestApi(unittest.TestCase):
assert (found_b["packages"].has_key(test_pkg_name))
# Grab the associated package version (there should only be 1)
- # Ensure that the package versions have different md5sums, but all other
+ # Ensure that the package versions have different checksums, but all other
# keys are identical
- assert (len(found_a["packages"][test_pkg_name]["versions"]) == 1)
- assert (len(found_b["packages"][test_pkg_name]["versions"]) == 1)
- pkgVerA = found_a["packages"][test_pkg_name]["versions"][0]
- pkgVerB = found_a["packages"][test_pkg_name]["versions"][0]
- for key in ['epoch', 'version', 'release', 'arch']:
+ assert (len(found_a["packages"][test_pkg_name]) == 1)
+ assert (len(found_b["packages"][test_pkg_name]) == 1)
+ pkgVerA = found_a["packages"][test_pkg_name][0]
+ pkgVerB = found_b["packages"][test_pkg_name][0]
+ for key in ['epoch', 'version', 'release', 'arch', 'filename', 'name']:
assert (pkgVerA[key] == pkgVerB[key])
-
#TODO:
# Add test to compare checksum when it's implemented in PackageVersion
# verify the checksums are different
@@ -304,12 +299,12 @@ class TestApi(unittest.TestCase):
Sync 2 repos that share a common package, same NEVRA
same checksum
"""
- test_pkg_name = "pulp-test-package-same-nevra"
+ test_pkg_name = "pulp-test-package"
my_dir = os.path.abspath(os.path.dirname(__file__))
repo_name_a = "test_two_repos_share_common_pkg_repo_A"
repo_name_b = "test_two_repos_share_common_pkg_repo_B"
- datadir_a = my_dir + "/data/sameNEVRA_differentChecksums/A/repo/"
- datadir_b = my_dir + "/data/sameNEVRA_differentChecksums/B/repo/"
+ datadir_a = my_dir + "/data/sameNEVRA_sameChecksums/A/repo/"
+ datadir_b = my_dir + "/data/sameNEVRA_sameChecksums/B/repo/"
# Create & Sync Repos
repo_a = self.rapi.create(repo_name_a,'some name', 'x86_64',
'local:file://%s' % datadir_a)
@@ -327,14 +322,14 @@ class TestApi(unittest.TestCase):
# Ensure that the package versions have different md5sums, but all other
# keys are identical
- # BELOW TEST Needs more changes to model/sync code before it can pass
- #assert (len(found_a["packages"][test_pkg_name]["versions"]) == 1)
- #assert (len(found_b["packages"][test_pkg_name]["versions"]) == 1)
- #pkgVerA = found_a["packages"][test_pkg_name]["versions"][0]
- #pkgVerB = found_a["packages"][test_pkg_name]["versions"][0]
+ assert (len(found_a["packages"][test_pkg_name]) == 1)
+ assert (len(found_b["packages"][test_pkg_name]) == 1)
+ pkgVerA = found_a["packages"][test_pkg_name][0]
+ pkgVerB = found_b["packages"][test_pkg_name][0]
# Ensure that the 2 PackageVersions instances actually point
# to the same single instance
- #assert(pkgVerA['_id'] == pkgVerB['_id'])
+ assert(repo_a['_id'] != repo_b['_id'])
+ assert(pkgVerA['_id'] == pkgVerB['_id'])
def test_sync(self):
repo = self.rapi.create('some-id','some name', 'i386',
@@ -352,7 +347,9 @@ class TestApi(unittest.TestCase):
dirList = os.listdir(self.rapi.localStoragePath + '/' + repo.id)
assert(len(dirList) > 0)
found = self.rapi.repository(repo.id)
+ print "found = ", found
packages = found['packages']
+ print "packages = ", packages
assert(packages != None)
assert(len(packages) > 0)
@@ -369,27 +366,84 @@ class TestApi(unittest.TestCase):
assert(len(packages) > 0)
print packages
p = packages.values()[0]
- assert(p['versions'] != None)
+ assert(p != None)
# versions = p['versions']
- def test_package_versions(self):
- p = self.papi.create('some-package-id', 'some package desc')
- pv = self.pvapi.create(p.packageid, 0, '1.2.3', '1', 'i386')
- p.versions.append(pv)
- self.papi.update(p)
+ def create_package_version(self, name):
+ test_pkg_name = name
+ test_epoch = "1"
+ test_version = "1.2.3"
+ test_release = "1.el5"
+ test_arch = "x86_64"
+ test_description = "test description text"
+ test_checksum_type = "sha256"
+ test_checksum = "9d05cc3dbdc94150966f66d76488a3ed34811226735e56dc3e7a721de194b42e"
+ test_filename = "test-filename-1.2.3-1.el5.x86_64.rpm"
+ pv = self.pvapi.create(name=test_pkg_name, epoch=test_epoch, version=test_version,
+ release=test_release, arch=test_arch, description=test_description,
+ checksum_type="sha256", checksum=test_checksum, filename=test_filename)
+ return pv
- found = self.papi.package(p.packageid)
- versions = found['versions']
- assert(versions != None)
- assert(versions[0]['packageid'] == p.packageid)
- print found
-
- def test_packages(self):
- p = self.papi.create('some-package-id', 'some package desc')
- packages = self.papi.packages()
- print "packages: %s" % packages
- assert(len(packages) > 0)
-
+ def test_package_versions(self):
+ repo = self.rapi.create('some-id','some name',
+ 'i386', 'yum:http://example.com')
+ repo = self.rapi.repository(repo["id"])
+ test_pkg_name = "test_package_versions_name"
+ test_epoch = "1"
+ test_version = "1.2.3"
+ test_release = "1.el5"
+ test_arch = "x86_64"
+ test_description = "test description text"
+ test_checksum_type = "sha256"
+ test_checksum = "9d05cc3dbdc94150966f66d76488a3ed34811226735e56dc3e7a721de194b42e"
+ test_filename = "test-filename-1.2.3-1.el5.x86_64.rpm"
+ pv = self.pvapi.create(name=test_pkg_name, epoch=test_epoch, version=test_version,
+ release=test_release, arch=test_arch, description=test_description,
+ checksum_type="sha256", checksum=test_checksum, filename=test_filename)
+ # Add this package version to the repo
+ self.rapi.add_package_version(repo["id"], pv)
+ # Lookup repo and confirm new package version was added
+ repo = self.rapi.repository(repo["id"])
+ self.assertTrue(repo["packages"].has_key(test_pkg_name))
+ self.assertTrue(len(repo["packages"][test_pkg_name]) == 1)
+ saved_pkg = repo["packages"][test_pkg_name][0]
+ self.assertTrue(saved_pkg['name'] == test_pkg_name)
+ self.assertTrue(saved_pkg['epoch'] == test_epoch)
+ self.assertTrue(saved_pkg['version'] == test_version)
+ self.assertTrue(saved_pkg['release'] == test_release)
+ self.assertTrue(saved_pkg['arch'] == test_arch)
+ self.assertTrue(saved_pkg['description'] == test_description)
+ self.assertTrue(saved_pkg['checksum'].has_key(test_checksum_type))
+ self.assertTrue(saved_pkg['checksum'][test_checksum_type] == test_checksum)
+ self.assertTrue(saved_pkg['filename'] == test_filename)
+ # Verify we can find this package version through repo api calls
+ pkgs = self.rapi.packages(repo['id'])
+ self.assertTrue(pkgs.has_key(test_pkg_name))
+ self.assertTrue(len(pkgs[test_pkg_name]) == 1)
+ self.assertTrue(pkgs[test_pkg_name][0]['filename'] == test_filename)
+ pkgs = self.rapi.packageversions(repo['id'], test_pkg_name)
+ self.assertTrue(len(pkgs) == 1)
+ self.assertTrue(pkgs[0]['filename'] == test_filename)
+
+ # Remove package version from repo
+ self.rapi.remove_package_version(repo['id'], pv)
+ repo = self.rapi.repository(repo['id'])
+ self.assertTrue(not repo["packages"].has_key(test_pkg_name))
+ # Verify package version from repo
+ found = self.pvapi.packageversion(name=test_pkg_name, epoch=test_epoch,
+ version=test_version, release=test_release, arch=test_arch,
+ filename=test_filename, checksum_type=test_checksum_type,
+ checksum=test_checksum)
+ self.assertTrue(found.count() == 1)
+ # Remove from PackageVersion collection
+ self.pvapi.delete(found[0])
+ # Verify it's deleted
+ found = self.pvapi.packageversion(name=test_pkg_name, epoch=test_epoch,
+ version=test_version, release=test_release, arch=test_arch,
+ filename=test_filename, checksum_type=test_checksum_type,
+ checksum=test_checksum)
+ self.assertTrue(found.count() == 0)
+
def test_package_groups(self):
pkggroup = self.pgapi.create('test-pkg-group-id', 'test-pkg-group-name',
'test-pkg-group-description')
diff --git a/test/unit/test_comps.py b/test/unit/test_comps.py
new file mode 100644
index 0000000..a868805
--- /dev/null
+++ b/test/unit/test_comps.py
@@ -0,0 +1,192 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2010 Red Hat, Inc.
+#
+#
+# This software is licensed to you under the GNU General Public License,
+# version 2 (GPLv2). There is NO WARRANTY for this software, express or
+# implied, including the implied warranties of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
+# along with this software; if not, see
+# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
+#
+# Red Hat trademarks are not licensed under GPLv2. No permission is
+# granted to use or replicate Red Hat trademarks that are incorporated
+# in this software or its documentation.
+#
+import sys
+import os
+srcdir = os.path.abspath(os.path.dirname(__file__)) + "/../../src"
+sys.path.append(srcdir)
+import unittest
+import logging
+
+import yum
+
+import pulp.util
+import pulp.model
+from pulp.api.repo import RepoApi
+from pulp.repo_sync import BaseSynchronizer
+
+class TestComps(unittest.TestCase):
+
+ def setUp(self):
+ config_file = os.path.join(srcdir, "../etc/pulp/pulp.ini")
+ self.config = pulp.util.loadConfig(config_file)
+ self.rapi = RepoApi(self.config)
+ self.rapi.clean()
+
+ def tearDown(self):
+ self.rapi.clean()
+
+
+ def test_import_groups_data(self):
+ repo = self.rapi.create('test_import_groups_data_id',
+ 'test_import_groups_data_id', 'i386',
+ 'yum:http://example.com/')
+ # Parse existing comps.xml
+ compspath = "./data/rhel-i386-server-5/comps.xml"
+ compsfile = open(compspath)
+ base = BaseSynchronizer(self.config)
+ base.import_groups_data(compsfile, repo)
+ # 'repo' object should now contain groups/categories
+ # we need to save it to the db so we can query from it
+ self.rapi.update(repo)
+ # Testing for expected values
+ found = self.rapi.packagegroup(repo['id'], "web-server")
+ self.assertTrue(found != None)
+ self.assertTrue("httpd" in found['mandatory_package_names'])
+ self.assertTrue("mod_auth_kerb" in found['optional_package_names'])
+ self.assertTrue("mod_auth_mysql" in found['optional_package_names'])
+ self.assertTrue("crypto-utils" in found['default_package_names'])
+ self.assertTrue("distcache" in found['default_package_names'])
+ # PackageGroupCategory: look up expected values
+ found = self.rapi.packagegroupcategory(repo['id'], "BAD_VALUE_NOT_IN_CATEGORY")
+ self.assertTrue(found == None)
+ found = self.rapi.packagegroupcategory(repo['id'], "development")
+ self.assertTrue(found != None)
+
+ def test_basic_comps(self):
+ repo = self.rapi.create('test_comps_id','test_comps_name',
+ 'i386', 'yum:http://example.com/')
+ grp = pulp.model.PackageGroup("groupid1", "groupname1",
+ "description", "user_visible", "display_order", "default",
+ "langonly")
+ grp['mandatory_package_names'] = ["mandatory_package_name1"]
+ grp['optional_package_names'] = ["optional_package_name1"]
+ grp['default_package_names'] = ["default_package_name1"]
+ grp['conditional_package_names'] = {"pkg1":"value pkg1"}
+ grp['translated_name'] = {"a":"value"}
+ grp['translated_description'] = {"b":"value"}
+ self.rapi.update_packagegroup(repo['id'], grp)
+ found = self.rapi.packagegroup(repo['id'], grp['id'])
+ self.assertTrue(found != None)
+ self.assertTrue(found['name'] == 'groupname1')
+ self.assertTrue("mandatory_package_name1" in found['mandatory_package_names'])
+
+ ctg = pulp.model.PackageGroupCategory("categoryid1",
+ "categoryname", "description", "display_order")
+ ctg['packagegroupids'] = ["groupid1"]
+ ctg['translated_name'] = {"a":"name"}
+ ctg['translated_description'] = {"b":"description"}
+ self.rapi.update_packagegroupcategory(repo["id"], ctg)
+ found = self.rapi.packagegroupcategory(repo["id"], ctg["id"])
+ self.assertTrue(found != None)
+ self.assertTrue(found["name"] == "categoryname")
+ self.assertTrue("groupid1" in found["packagegroupids"])
+
+ def broken_intend_this_to_run_full_read_write_out_to_xml(self):
+ """
+ Test full cycle of Groups/Categories, import a comps.xml, parse it
+ modify the entries, then write them out to XML
+ """
+ #TODO: Writing to XML is broken
+ # Parse existing comps.xml
+ compsPath = "./data/rhel-i386-server-5/comps.xml"
+ comps = yum.comps.Comps()
+ comps.add(compsPath)
+ self.assertTrue(len(comps.get_groups()) != 0)
+ self.assertTrue(len(comps.get_categories()) != 0)
+ # Create Groups/Categories from parsed data
+ repo = self.rapi.create('test_comps_id','test_comps_name',
+ 'i386', 'yum:http://example.com/')
+ found = self.rapi.packagegroups(repo['id'])
+ self.assertTrue(len(found) == 0)
+ found = self.rapi.packagegroupcategories(repo['id'])
+ self.assertTrue(len(found) == 0)
+
+ grp_list = []
+ groupids = []
+ for g in comps.get_groups():
+ grp = pulp.model.PackageGroup(g.groupid, g.name,
+ g.description, g.user_visible, g.display_order, g.default,
+ g.langonly)
+ grp['mandatory_package_names'].extend(g.mandatory_packages.keys())
+ grp['optional_package_names'].extend(g.optional_packages.keys())
+ grp['default_package_names'].extend(g.default_packages.keys())
+ grp['conditional_package_names'] = g.conditional_packages
+ grp['translated_name'] = g.translated_name
+ grp['translated_description'] = g.translated_description
+ grp_list.append(grp)
+ groupids.append(grp['id'])
+ self.rapi.update_packagegroups(repo['id'], grp_list)
+ ctg_list = []
+ categoryids = []
+ for c in comps.get_categories():
+ ctg = pulp.model.PackageGroupCategory(c.categoryid,
+ c.name, c.description, c.display_order)
+ groupids = [grp for grp in c.groups]
+ ctg['packagegroupids'].extend(groupids)
+ ctg['translated_name'] = c.translated_name
+ ctg['translated_description'] = c.translated_description
+ ctg_list.append(ctg)
+ categoryids.append(ctg['id'])
+ self.rapi.update_packagegroupcategories(repo['id'], ctg_list)
+ # Lookup data from API calls
+ found = self.rapi.packagegroups(repo['id'])
+ self.assertTrue(len(found) > 0)
+ found = self.rapi.packagegroupcategories(repo['id'])
+ self.assertTrue(len(found) > 0)
+ # PackageGroup, look up expected values,
+ # good values come from known data in rhel-5 comps.xml
+ found = self.rapi.packagegroup(repo['id'], "BAD_VALUE_NOT_IN_GROUP")
+ self.assertTrue(found == None)
+ found = self.rapi.packagegroup(repo['id'], "web-server")
+ self.assertTrue(found != None)
+ self.assertTrue("httpd" in found['mandatory_package_names'])
+ self.assertTrue("mod_auth_kerb" in found['optional_package_names'])
+ self.assertTrue("mod_auth_mysql" in found['optional_package_names'])
+ self.assertTrue("crypto-utils" in found['default_package_names'])
+ self.assertTrue("distcache" in found['default_package_names'])
+ # PackageGroupCategory, look up expected values,
+ found = self.rapi.packagegroupcategory(repo['id'], "BAD_VALUE_NOT_IN_CATEGORY")
+ self.assertTrue(found == None)
+ found = self.rapi.packagegroupcategory(repo['id'], "development")
+ self.assertTrue(found != None)
+ # Test Removal
+ self.rapi.remove_packagegroup(repo['id'], "web-server")
+ found = self.rapi.packagegroup(repo['id'], "web-server")
+ self.assertTrue(found == None)
+ self.rapi.remove_packagegroupcategory(repo['id'], "development")
+ found = self.rapi.packagegroupcategory(repo['id'], "development")
+ self.assertTrue(found == None)
+
+ newComps = yum.comps.Comps()
+ # Look up categories from a repo
+ ctgs = self.rapi.packagegroupcategories(repo["id"])
+ grps = self.rapi.packagegroups(repo["id"])
+
+ for cid in ctgs:
+ category = self.rapi.translate_packagegroupcategory(ctgs[cid])
+ newComps.add_category(category)
+ for gid in grps:
+ pkggrp = self.rapi.translate_packagegroup(grps[gid])
+ newComps.add_group(pkggrp)
+ # Write back to xml
+ xml = newComps.xml()
+ print "Generated XML = %s" % (xml)
+ self.assertTrue(True)
+
+
+
+
_______________________________________________
Pulp-list mailing list
[email protected]
https://www.redhat.com/mailman/listinfo/pulp-list