Hello community,

here is the log from the commit of package python-pycadf for openSUSE:Factory 
checked in at 2014-06-04 18:39:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pycadf (Old)
 and      /work/SRC/openSUSE:Factory/.python-pycadf.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pycadf"

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pycadf/python-pycadf.changes      2014-04-06 09:55:25.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-pycadf.new/python-pycadf.changes 2014-06-04 18:39:34.000000000 +0200
@@ -1,0 +2,10 @@
+Tue May 27 07:17:07 UTC 2014 - [email protected]
+
+- update to 0.5.1:
+  * sync oslo
+  * remove token from notifier middleware
+  * Updated from global requirements
+  * reorder documentation
+  * import run_cross_tests.sh from incubator
+
+-------------------------------------------------------------------

Old:
----
  pycadf-0.5.tar.gz

New:
----
  pycadf-0.5.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pycadf.spec ++++++
--- /var/tmp/diff_new_pack.nQoyVV/_old  2014-06-04 18:39:36.000000000 +0200
+++ /var/tmp/diff_new_pack.nQoyVV/_new  2014-06-04 18:39:36.000000000 +0200
@@ -17,7 +17,7 @@
 
 
 Name:           python-pycadf
-Version:        0.5
+Version:        0.5.1
 Release:        0
 Summary:        DMTF Cloud Audit (CADF) data model
 License:        Apache-2.0

++++++ pycadf-0.5.tar.gz -> pycadf-0.5.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/AUTHORS new/pycadf-0.5.1/AUTHORS
--- old/pycadf-0.5/AUTHORS      2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/AUTHORS    2014-05-26 17:40:57.000000000 +0200
@@ -1 +1,6 @@
-
+Cyril Roelandt <[email protected]>
+Davanum Srinivas <[email protected]>
+Doug Hellmann <[email protected]>
+Gordon Chung <[email protected]>
+Matt Riedemann <[email protected]>
+Sascha Peilicke <[email protected]>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/ChangeLog new/pycadf-0.5.1/ChangeLog
--- old/pycadf-0.5/ChangeLog    2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/ChangeLog  2014-05-26 17:40:57.000000000 +0200
@@ -1,6 +1,15 @@
 CHANGES
 =======
 
+0.5.1
+-----
+
+* sync oslo
+* remove token from notifier middleware
+* Updated from global requirements
+* reorder documentation
+* import run_cross_tests.sh from incubator
+
 0.5
 ---
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/PKG-INFO new/pycadf-0.5.1/PKG-INFO
--- old/pycadf-0.5/PKG-INFO     2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/PKG-INFO   2014-05-26 17:40:57.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pycadf
-Version: 0.5
+Version: 0.5.1
 Summary: CADF Library
 Home-page: https://launchpad.net/pycadf
 Author: OpenStack
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/doc/ext/apidoc.py 
new/pycadf-0.5.1/doc/ext/apidoc.py
--- old/pycadf-0.5/doc/ext/apidoc.py    1970-01-01 01:00:00.000000000 +0100
+++ new/pycadf-0.5.1/doc/ext/apidoc.py  2014-05-26 17:40:11.000000000 +0200
@@ -0,0 +1,41 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import os.path as path
+
+from sphinx import apidoc
+
+
+# NOTE(gordc): pbr will run Sphinx multiple times when it generates
+# documentation. Once for each builder. To run this extension we use the
+# 'builder-inited' hook that fires at the beginning of a Sphinx build.
+# We use ``run_already`` to make sure apidocs are only generated once
+# even if Sphinx is run multiple times.
+run_already = False
+
+
+def run_apidoc(app):
+    global run_already
+    if run_already:
+        return
+    run_already = True
+
+    package_dir = path.abspath(path.join(app.srcdir, '..', '..', 'pycadf'))
+    source_dir = path.join(app.srcdir, 'api')
+    apidoc.main(['apidoc', package_dir, '-f',
+                 '-H', 'pyCADF Modules',
+                 '-o', source_dir])
+
+
+def setup(app):
+    app.connect('builder-inited', run_apidoc)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/doc/source/api/index.rst 
new/pycadf-0.5.1/doc/source/api/index.rst
--- old/pycadf-0.5/doc/source/api/index.rst     2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/doc/source/api/index.rst   1970-01-01 01:00:00.000000000 +0100
@@ -1,8 +0,0 @@
-==============
- API Reference
-==============
-
-.. toctree::
-   :maxdepth: 1
-
-   autoindex
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/doc/source/conf.py 
new/pycadf-0.5.1/doc/source/conf.py
--- old/pycadf-0.5/doc/source/conf.py   2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/doc/source/conf.py 2014-05-26 17:40:11.000000000 +0200
@@ -13,6 +13,9 @@
 
 import sys, os
 
+# NOTE(gordc): path for Sphinx ext.apidoc
+sys.path.insert(0, os.path.abspath('..'))
+
 # This is required for ReadTheDocs.org, but isn't a bad idea anyway.
 os.environ['DJANGO_SETTINGS_MODULE'] = 'openstack_dashboard.settings'
 
@@ -35,7 +38,8 @@
     'sphinx.ext.coverage',
     'sphinx.ext.pngmath',
     'sphinx.ext.viewcode',
-    'oslosphinx'
+    'oslosphinx',
+    'ext.apidoc'
 ]
 
 # Add any paths that contain templates here, relative to this directory.
@@ -136,7 +140,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+#html_static_path = ['_static']
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/doc/source/contributing.rst 
new/pycadf-0.5.1/doc/source/contributing.rst
--- old/pycadf-0.5/doc/source/contributing.rst  1970-01-01 01:00:00.000000000 +0100
+++ new/pycadf-0.5.1/doc/source/contributing.rst        2014-05-26 17:40:11.000000000 +0200
@@ -0,0 +1,31 @@
+..
+      Copyright 2014 IBM Corp.
+
+      Licensed under the Apache License, Version 2.0 (the "License"); you may
+      not use this file except in compliance with the License. You may obtain
+      a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+      Unless required by applicable law or agreed to in writing, software
+      distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+      WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+      License for the specific language governing permissions and limitations
+      under the License.
+
+.. _contributing:
+
+=============
+ Contributing
+=============
+
+Contributions to the pyCADF library are welcome.
+
+Important links:
+
+1. `Github repository`_
+
+2. `Launchpad project`_
+
+.. _Github repository: https://github.com/openstack/pycadf
+.. _Launchpad project: https://launchpad.net/pycadf
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/doc/source/index.rst 
new/pycadf-0.5.1/doc/source/index.rst
--- old/pycadf-0.5/doc/source/index.rst 2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/doc/source/index.rst       2014-05-26 17:40:11.000000000 +0200
@@ -35,18 +35,30 @@
 
 .. _CADF (Cloud Audit Data Federation Working Group): 
http://www.dmtf.org/standards/cadf
 
-Table of contents
-=================
+Getting Started
+===============
 
 .. toctree::
-   :maxdepth: 2
+    :maxdepth: 1
 
-   api/index
-   specification/index
-   event_concept
-   middleware
+    event_concept
+    middleware
 
-.. update index
+Developer Documentation
+=======================
+
+.. toctree::
+    :maxdepth: 1
+
+    contributing
+    specification/index
+
+Code Documentation
+==================
+.. toctree::
+   :maxdepth: 1
+
+   api/modules
 
 Indices and tables
 ==================
@@ -54,8 +66,3 @@
 * :ref:`genindex`
 * :ref:`modindex`
 * :ref:`search`
-
-To Do
-=====
-
-.. todolist::
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/openstack-common.conf 
new/pycadf-0.5.1/openstack-common.conf
--- old/pycadf-0.5/openstack-common.conf        2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/openstack-common.conf      2014-05-26 17:40:11.000000000 +0200
@@ -4,4 +4,5 @@
 module=gettextutils
 module=importutils
 module=jsonutils
+script=tools/run_cross_tests.sh
 base=pycadf
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/middleware/notifier.py 
new/pycadf-0.5.1/pycadf/middleware/notifier.py
--- old/pycadf-0.5/pycadf/middleware/notifier.py        2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/middleware/notifier.py      2014-05-26 
17:40:11.000000000 +0200
@@ -88,7 +88,7 @@
         include them.
         """
         return dict((k, v) for k, v in six.iteritems(environ)
-                    if k.isupper())
+                    if k.isupper() and k != 'HTTP_X_AUTH_TOKEN')
 
     @log_and_ignore_error
     def process_request(self, request):
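The notifier.py hunk above is what implements the "remove token from notifier
middleware" changelog entry: the audited copy of the WSGI environ now skips
HTTP_X_AUTH_TOKEN in addition to lower-case keys. A minimal sketch of the
resulting behaviour (the environ values below are made-up test data):

    import six

    environ = {
        'REQUEST_METHOD': 'GET',          # kept: upper-case, not the token
        'HTTP_X_AUTH_TOKEN': 'secret',    # dropped by the new filter
        'wsgi.input': object(),           # dropped: lower-case key
    }

    audited = dict((k, v) for k, v in six.iteritems(environ)
                   if k.isupper() and k != 'HTTP_X_AUTH_TOKEN')
    assert audited == {'REQUEST_METHOD': 'GET'}
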
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/__init__.py 
new/pycadf-0.5.1/pycadf/openstack/common/__init__.py
--- old/pycadf-0.5/pycadf/openstack/common/__init__.py  2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/__init__.py        2014-05-26 
17:40:11.000000000 +0200
@@ -1,2 +1,17 @@
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
 import six
+
+
 six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/context.py 
new/pycadf-0.5.1/pycadf/openstack/common/context.py
--- old/pycadf-0.5/pycadf/openstack/common/context.py   2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/context.py 2014-05-26 
17:40:11.000000000 +0200
@@ -25,7 +25,7 @@
 
 
 def generate_request_id():
-    return 'req-%s' % str(uuid.uuid4())
+    return b'req-' + str(uuid.uuid4()).encode('ascii')
 
 
 class RequestContext(object):
@@ -98,3 +98,14 @@
             return arg
 
     return None
+
+
+def is_user_context(context):
+    """Indicates if the request context is a normal user."""
+    if not context:
+        return False
+    if context.is_admin:
+        return False
+    if not context.user_id or not context.project_id:
+        return False
+    return True
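Two behavioural notes on the context.py hunk above: generate_request_id() now
returns ASCII bytes (b'req-...') instead of a native string, and the new
is_user_context() helper only treats a context as a normal user when it is
non-empty, not admin, and carries both user_id and project_id. An illustrative
check, as a sketch:

    from pycadf.openstack.common import context

    req_id = context.generate_request_id()
    assert isinstance(req_id, bytes) and req_id.startswith(b'req-')

    assert context.is_user_context(None) is False   # empty context
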
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/excutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/excutils.py
--- old/pycadf-0.5/pycadf/openstack/common/excutils.py  2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/excutils.py        2014-05-26 
17:40:11.000000000 +0200
@@ -24,7 +24,7 @@
 
 import six
 
-from pycadf.openstack.common.gettextutils import _
+from pycadf.openstack.common.gettextutils import _LE
 
 
 class save_and_reraise_exception(object):
@@ -49,9 +49,22 @@
               decide_if_need_reraise()
               if not should_be_reraised:
                   ctxt.reraise = False
+
+    If another exception occurs and the reraise flag is False,
+    the saved exception will not be logged.
+
+    If the caller wants to raise a new exception during exception handling,
+    he/she sets reraise to False initially, with the ability to set it back
+    to True if needed::
+
+      except Exception:
+          with save_and_reraise_exception(reraise=False) as ctxt:
+              [if statements to determine whether to raise a new exception]
+              # Not raising a new exception, so reraise
+              ctxt.reraise = True
     """
-    def __init__(self):
-        self.reraise = True
+    def __init__(self, reraise=True):
+        self.reraise = reraise
 
     def __enter__(self):
         self.type_, self.value, self.tb, = sys.exc_info()
@@ -59,10 +72,11 @@
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         if exc_type is not None:
-            logging.error(_('Original exception being dropped: %s'),
-                          traceback.format_exception(self.type_,
-                                                     self.value,
-                                                     self.tb))
+            if self.reraise:
+                logging.error(_LE('Original exception being dropped: %s'),
+                              traceback.format_exception(self.type_,
+                                                         self.value,
+                                                         self.tb))
             return False
         if self.reraise:
             six.reraise(self.type_, self.value, self.tb)
@@ -88,8 +102,8 @@
                 if (cur_time - last_log_time > 60 or
                         this_exc_message != last_exc_message):
                     logging.exception(
-                        _('Unexpected exception occurred %d time(s)... '
-                          'retrying.') % exc_count)
+                        _LE('Unexpected exception occurred %d time(s)... '
+                            'retrying.') % exc_count)
                     last_log_time = cur_time
                     last_exc_message = this_exc_message
                     exc_count = 0
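The excutils.py change above adds an optional reraise flag to
save_and_reraise_exception and stops logging the saved exception when reraise
is False. A hedged usage sketch following the docstring in the hunk
(delete_backend() and should_propagate() are hypothetical helpers):

    from pycadf.openstack.common import excutils

    def cleanup():
        try:
            delete_backend()
        except Exception:
            with excutils.save_and_reraise_exception(reraise=False) as ctxt:
                if should_propagate():
                    # decided the original exception matters after all
                    ctxt.reraise = True
                # otherwise it is silently dropped and, new in 0.5.1, no
                # longer logged as "Original exception being dropped"
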
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/fileutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/fileutils.py
--- old/pycadf-0.5/pycadf/openstack/common/fileutils.py 2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/fileutils.py       2014-05-26 
17:40:11.000000000 +0200
@@ -19,7 +19,6 @@
 import tempfile
 
 from pycadf.openstack.common import excutils
-from pycadf.openstack.common.gettextutils import _
 from pycadf.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
@@ -59,7 +58,7 @@
     cache_info = _FILE_CACHE.setdefault(filename, {})
 
     if not cache_info or mtime > cache_info.get('mtime', 0):
-        LOG.debug(_("Reloading cached file %s") % filename)
+        LOG.debug("Reloading cached file %s" % filename)
         with open(filename) as fap:
             cache_info['data'] = fap.read()
         cache_info['mtime'] = mtime
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/pycadf-0.5/pycadf/openstack/common/fixture/lockutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/fixture/lockutils.py
--- old/pycadf-0.5/pycadf/openstack/common/fixture/lockutils.py 2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/fixture/lockutils.py       
2014-05-26 17:40:11.000000000 +0200
@@ -48,4 +48,4 @@
     def setUp(self):
         super(LockFixture, self).setUp()
         self.addCleanup(self.mgr.__exit__, None, None, None)
-        self.mgr.__enter__()
+        self.lock = self.mgr.__enter__()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/pycadf-0.5/pycadf/openstack/common/fixture/mockpatch.py 
new/pycadf-0.5.1/pycadf/openstack/common/fixture/mockpatch.py
--- old/pycadf-0.5/pycadf/openstack/common/fixture/mockpatch.py 2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/fixture/mockpatch.py       
2014-05-26 17:40:11.000000000 +0200
@@ -15,6 +15,17 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+##############################################################################
+##############################################################################
+##
+## DO NOT MODIFY THIS FILE
+##
+## This file is being graduated to the oslotest library. Please make all
+## changes there, and only backport critical fixes here. - dhellmann
+##
+##############################################################################
+##############################################################################
+
 import fixtures
 import mock
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/pycadf-0.5/pycadf/openstack/common/fixture/moxstubout.py 
new/pycadf-0.5.1/pycadf/openstack/common/fixture/moxstubout.py
--- old/pycadf-0.5/pycadf/openstack/common/fixture/moxstubout.py        
2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/fixture/moxstubout.py      
2014-05-26 17:40:11.000000000 +0200
@@ -15,8 +15,19 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+##############################################################################
+##############################################################################
+##
+## DO NOT MODIFY THIS FILE
+##
+## This file is being graduated to the oslotest library. Please make all
+## changes there, and only backport critical fixes here. - dhellmann
+##
+##############################################################################
+##############################################################################
+
 import fixtures
-from six.moves import mox  # noqa
+from six.moves import mox
 
 
 class MoxStubout(fixtures.Fixture):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/gettextutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/gettextutils.py
--- old/pycadf-0.5/pycadf/openstack/common/gettextutils.py      2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/gettextutils.py    2014-05-26 
17:40:11.000000000 +0200
@@ -28,70 +28,135 @@
 import locale
 from logging import handlers
 import os
-import re
 
 from babel import localedata
 import six
 
-_localedir = os.environ.get('pycadf'.upper() + '_LOCALEDIR')
-_t = gettext.translation('pycadf', localedir=_localedir, fallback=True)
-
-# We use separate translation catalogs for each log level, so set up a
-# mapping between the log level name and the translator. The domain
-# for the log level is project_name + "-log-" + log_level so messages
-# for each level end up in their own catalog.
-_t_log_levels = dict(
-    (level, gettext.translation('pycadf' + '-log-' + level,
-                                localedir=_localedir,
-                                fallback=True))
-    for level in ['info', 'warning', 'error', 'critical']
-)
-
 _AVAILABLE_LANGUAGES = {}
-USE_LAZY = False
 
+# FIXME(dhellmann): Remove this when moving to oslo.i18n.
+USE_LAZY = False
 
-def enable_lazy():
-    """Convenience function for configuring _() to use lazy gettext
 
-    Call this at the start of execution to enable the gettextutils._
-    function to use lazy gettext functionality. This is useful if
-    your project is importing _ directly instead of using the
-    gettextutils.install() way of importing the _ function.
+class TranslatorFactory(object):
+    """Create translator functions
     """
-    global USE_LAZY
-    USE_LAZY = True
 
+    def __init__(self, domain, lazy=False, localedir=None):
+        """Establish a set of translation functions for the domain.
 
-def _(msg):
-    if USE_LAZY:
-        return Message(msg, domain='pycadf')
-    else:
+        :param domain: Name of translation domain,
+                       specifying a message catalog.
+        :type domain: str
+        :param lazy: Delays translation until a message is emitted.
+                     Defaults to False.
+        :type lazy: Boolean
+        :param localedir: Directory with translation catalogs.
+        :type localedir: str
+        """
+        self.domain = domain
+        self.lazy = lazy
+        if localedir is None:
+            localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
+        self.localedir = localedir
+
+    def _make_translation_func(self, domain=None):
+        """Return a new translation function ready for use.
+
+        Takes into account whether or not lazy translation is being
+        done.
+
+        The domain can be specified to override the default from the
+        factory, but the localedir from the factory is always used
+        because we assume the log-level translation catalogs are
+        installed in the same directory as the main application
+        catalog.
+
+        """
+        if domain is None:
+            domain = self.domain
+        if self.lazy:
+            return functools.partial(Message, domain=domain)
+        t = gettext.translation(
+            domain,
+            localedir=self.localedir,
+            fallback=True,
+        )
         if six.PY3:
-            return _t.gettext(msg)
-        return _t.ugettext(msg)
+            return t.gettext
+        return t.ugettext
 
+    @property
+    def primary(self):
+        "The default translation function."
+        return self._make_translation_func()
+
+    def _make_log_translation_func(self, level):
+        return self._make_translation_func(self.domain + '-log-' + level)
+
+    @property
+    def log_info(self):
+        "Translate info-level log messages."
+        return self._make_log_translation_func('info')
+
+    @property
+    def log_warning(self):
+        "Translate warning-level log messages."
+        return self._make_log_translation_func('warning')
+
+    @property
+    def log_error(self):
+        "Translate error-level log messages."
+        return self._make_log_translation_func('error')
+
+    @property
+    def log_critical(self):
+        "Translate critical-level log messages."
+        return self._make_log_translation_func('critical')
+
+
+# NOTE(dhellmann): When this module moves out of the incubator into
+# oslo.i18n, these global variables can be moved to an integration
+# module within each application.
 
-def _log_translation(msg, level):
-    """Build a single translation of a log message
-    """
-    if USE_LAZY:
-        return Message(msg, domain='pycadf' + '-log-' + level)
-    else:
-        translator = _t_log_levels[level]
-        if six.PY3:
-            return translator.gettext(msg)
-        return translator.ugettext(msg)
+# Create the global translation functions.
+_translators = TranslatorFactory('pycadf')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
 
 # Translators for log levels.
 #
 # The abbreviated names are meant to reflect the usual use of a short
 # name like '_'. The "L" is for "log" and the other letter comes from
 # the level.
-_LI = functools.partial(_log_translation, level='info')
-_LW = functools.partial(_log_translation, level='warning')
-_LE = functools.partial(_log_translation, level='error')
-_LC = functools.partial(_log_translation, level='critical')
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
+
+# NOTE(dhellmann): End of globals that will move to the application's
+# integration module.
+
+
+def enable_lazy():
+    """Convenience function for configuring _() to use lazy gettext
+
+    Call this at the start of execution to enable the gettextutils._
+    function to use lazy gettext functionality. This is useful if
+    your project is importing _ directly instead of using the
+    gettextutils.install() way of importing the _ function.
+    """
+    # FIXME(dhellmann): This function will be removed in oslo.i18n,
+    # because the TranslatorFactory makes it superfluous.
+    global _, _LI, _LW, _LE, _LC, USE_LAZY
+    tf = TranslatorFactory('pycadf', lazy=True)
+    _ = tf.primary
+    _LI = tf.log_info
+    _LW = tf.log_warning
+    _LE = tf.log_error
+    _LC = tf.log_critical
+    USE_LAZY = True
 
 
 def install(domain, lazy=False):
@@ -113,26 +178,9 @@
                  any available locale.
     """
     if lazy:
-        # NOTE(mrodden): Lazy gettext functionality.
-        #
-        # The following introduces a deferred way to do translations on
-        # messages in OpenStack. We override the standard _() function
-        # and % (format string) operation to build Message objects that can
-        # later be translated when we have more information.
-        def _lazy_gettext(msg):
-            """Create and return a Message object.
-
-            Lazy gettext function for a given domain, it is a factory method
-            for a project/module to get a lazy gettext function for its own
-            translation domain (i.e. nova, glance, cinder, etc.)
-
-            Message encapsulates a string so that we can translate
-            it later when needed.
-            """
-            return Message(msg, domain=domain)
-
         from six import moves
-        moves.builtins.__dict__['_'] = _lazy_gettext
+        tf = TranslatorFactory(domain, lazy=True)
+        moves.builtins.__dict__['_'] = tf.primary
     else:
         localedir = '%s_LOCALEDIR' % domain.upper()
         if six.PY3:
@@ -248,47 +296,22 @@
         if other is None:
             params = (other,)
         elif isinstance(other, dict):
-            params = self._trim_dictionary_parameters(other)
-        else:
-            params = self._copy_param(other)
-        return params
-
-    def _trim_dictionary_parameters(self, dict_param):
-        """Return a dict that only has matching entries in the msgid."""
-        # NOTE(luisg): Here we trim down the dictionary passed as parameters
-        # to avoid carrying a lot of unnecessary weight around in the message
-        # object, for example if someone passes in Message() % locals() but
-        # only some params are used, and additionally we prevent errors for
-        # non-deepcopyable objects by unicoding() them.
-
-        # Look for %(param) keys in msgid;
-        # Skip %% and deal with the case where % is first character on the line
-        keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid)
-
-        # If we don't find any %(param) keys but have a %s
-        if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid):
-            # Apparently the full dictionary is the parameter
-            params = self._copy_param(dict_param)
-        else:
+            # Merge the dictionaries
+            # Copy each item in case one does not support deep copy.
             params = {}
-            # Save our existing parameters as defaults to protect
-            # ourselves from losing values if we are called through an
-            # (erroneous) chain that builds a valid Message with
-            # arguments, and then does something like "msg % kwds"
-            # where kwds is an empty dictionary.
-            src = {}
             if isinstance(self.params, dict):
-                src.update(self.params)
-            src.update(dict_param)
-            for key in keys:
-                params[key] = self._copy_param(src[key])
-
+                for key, val in self.params.items():
+                    params[key] = self._copy_param(val)
+            for key, val in other.items():
+                params[key] = self._copy_param(val)
+        else:
+            params = self._copy_param(other)
         return params
 
     def _copy_param(self, param):
         try:
             return copy.deepcopy(param)
-        except TypeError:
+        except Exception:
             # Fallback to casting to unicode this will handle the
             # python code-like objects that can't be deep-copied
             return six.text_type(param)
@@ -300,13 +323,14 @@
     def __radd__(self, other):
         return self.__add__(other)
 
-    def __str__(self):
-        # NOTE(luisg): Logging in python 2.6 tries to str() log records,
-        # and it expects specifically a UnicodeError in order to proceed.
-        msg = _('Message objects do not support str() because they may '
-                'contain non-ascii characters. '
-                'Please use unicode() or translate() instead.')
-        raise UnicodeError(msg)
+    if six.PY2:
+        def __str__(self):
+            # NOTE(luisg): Logging in python 2.6 tries to str() log records,
+            # and it expects specifically a UnicodeError in order to proceed.
+            msg = _('Message objects do not support str() because they may '
+                    'contain non-ascii characters. '
+                    'Please use unicode() or translate() instead.')
+            raise UnicodeError(msg)
 
 
 def get_available_languages(domain):
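The gettextutils.py rewrite above replaces the module-level gettext setup with
a TranslatorFactory that builds the _ and _L* helpers; pycadf creates these
globals itself at import time, so the snippet below is only a sketch of how
another project would wire up the same factory (the 'myproject' domain is
hypothetical):

    from pycadf.openstack.common.gettextutils import TranslatorFactory

    tf = TranslatorFactory('myproject')
    _ = tf.primary          # user-facing messages
    _LE = tf.log_error      # messages from the 'myproject-log-error' catalog

    # enable_lazy() does essentially the same with lazy=True, rebinding the
    # module-level _, _LI, _LW, _LE and _LC.
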
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/jsonutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/jsonutils.py
--- old/pycadf-0.5/pycadf/openstack/common/jsonutils.py 2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/jsonutils.py       2014-05-26 
17:40:11.000000000 +0200
@@ -35,18 +35,20 @@
 import functools
 import inspect
 import itertools
-import json
-try:
-    import xmlrpclib
-except ImportError:
-    # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3
-    #                 however the function and object call signatures
-    #                 remained the same. This whole try/except block should
-    #                 be removed and replaced with a call to six.moves once
-    #                 six 1.4.2 is released. See http://bit.ly/1bqrVzu
-    import xmlrpc.client as xmlrpclib
+import sys
+
+if sys.version_info < (2, 7):
+    # On Python <= 2.6, json module is not C boosted, so try to use
+    # simplejson module if available
+    try:
+        import simplejson as json
+    except ImportError:
+        import json
+else:
+    import json
 
 import six
+import six.moves.xmlrpc_client as xmlrpclib
 
 from pycadf.openstack.common import gettextutils
 from pycadf.openstack.common import importutils
@@ -168,8 +170,8 @@
     return json.loads(s)
 
 
-def load(s):
-    return json.load(s)
+def load(fp):
+    return json.load(fp)
 
 
 try:
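For the jsonutils.py hunk above, the visible API is unchanged apart from
load() now naming its argument fp; the Python 2.6 simplejson fallback and the
six.moves xmlrpc import are internal details. A trivial usage sketch
(event.json is a hypothetical file):

    from pycadf.openstack.common import jsonutils

    data = jsonutils.loads('{"action": "read", "outcome": "success"}')
    with open('event.json') as fp:
        event = jsonutils.load(fp)
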
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/lockutils.py 
new/pycadf-0.5.1/pycadf/openstack/common/lockutils.py
--- old/pycadf-0.5/pycadf/openstack/common/lockutils.py 2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/lockutils.py       2014-05-26 
17:40:11.000000000 +0200
@@ -15,6 +15,7 @@
 
 import contextlib
 import errno
+import fcntl
 import functools
 import os
 import shutil
@@ -28,7 +29,7 @@
 from oslo.config import cfg
 
 from pycadf.openstack.common import fileutils
-from pycadf.openstack.common.gettextutils import _
+from pycadf.openstack.common.gettextutils import _, _LE, _LI
 from pycadf.openstack.common import log as logging
 
 
@@ -37,10 +38,10 @@
 
 util_opts = [
     cfg.BoolOpt('disable_process_locking', default=False,
-                help='Whether to disable inter-process locks'),
+                help='Enables or disables inter-process locks.'),
     cfg.StrOpt('lock_path',
                default=os.environ.get("PYCADF_LOCK_PATH"),
-               help=('Directory to use for lock files.'))
+               help='Directory to use for lock files.')
 ]
 
 
@@ -52,7 +53,7 @@
     cfg.set_defaults(util_opts, lock_path=lock_path)
 
 
-class _InterProcessLock(object):
+class _FileLock(object):
     """Lock implementation which allows multiple locks, working around
     issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
     not require any cleanup. Since the lock is always held on a file
@@ -79,7 +80,7 @@
 
         if not os.path.exists(basedir):
             fileutils.ensure_tree(basedir)
-            LOG.info(_('Created lock path: %s'), basedir)
+            LOG.info(_LI('Created lock path: %s'), basedir)
 
         self.lockfile = open(self.fname, 'w')
 
@@ -90,7 +91,7 @@
                 # Also upon reading the MSDN docs for locking(), it seems
                 # to have a laughable 10 attempts "blocking" mechanism.
                 self.trylock()
-                LOG.debug(_('Got file lock "%s"'), self.fname)
+                LOG.debug('Got file lock "%s"', self.fname)
                 return True
             except IOError as e:
                 if e.errno in (errno.EACCES, errno.EAGAIN):
@@ -114,14 +115,17 @@
         try:
             self.unlock()
             self.lockfile.close()
-            LOG.debug(_('Released file lock "%s"'), self.fname)
+            LOG.debug('Released file lock "%s"', self.fname)
         except IOError:
-            LOG.exception(_("Could not release the acquired lock `%s`"),
+            LOG.exception(_LE("Could not release the acquired lock `%s`"),
                           self.fname)
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.release()
 
+    def exists(self):
+        return os.path.exists(self.fname)
+
     def trylock(self):
         raise NotImplementedError()
 
@@ -129,7 +133,7 @@
         raise NotImplementedError()
 
 
-class _WindowsLock(_InterProcessLock):
+class _WindowsLock(_FileLock):
     def trylock(self):
         msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
 
@@ -137,7 +141,7 @@
         msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
 
 
-class _PosixLock(_InterProcessLock):
+class _FcntlLock(_FileLock):
     def trylock(self):
         fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
 
@@ -145,35 +149,106 @@
         fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
 
 
+class _PosixLock(object):
+    def __init__(self, name):
+        # Hash the name because it's not valid to have POSIX semaphore
+        # names with things like / in them. Then use base64 to encode
+        # the digest() instead of taking the hexdigest() because the
+        # result is shorter and most systems can't have shm semaphore
+        # names longer than 31 characters.
+        h = hashlib.sha1()
+        h.update(name.encode('ascii'))
+        self.name = str((b'/' + base64.urlsafe_b64encode(
+            h.digest())).decode('ascii'))
+
+    def acquire(self, timeout=None):
+        self.semaphore = posix_ipc.Semaphore(self.name,
+                                             flags=posix_ipc.O_CREAT,
+                                             initial_value=1)
+        self.semaphore.acquire(timeout)
+        return self
+
+    def __enter__(self):
+        self.acquire()
+        return self
+
+    def release(self):
+        self.semaphore.release()
+        self.semaphore.close()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.release()
+
+    def exists(self):
+        try:
+            semaphore = posix_ipc.Semaphore(self.name)
+        except posix_ipc.ExistentialError:
+            return False
+        else:
+            semaphore.close()
+        return True
+
+
 if os.name == 'nt':
     import msvcrt
     InterProcessLock = _WindowsLock
+    FileLock = _WindowsLock
 else:
-    import fcntl
+    import base64
+    import hashlib
+    import posix_ipc
     InterProcessLock = _PosixLock
+    FileLock = _FcntlLock
 
 _semaphores = weakref.WeakValueDictionary()
 _semaphores_lock = threading.Lock()
 
 
-def external_lock(name, lock_file_prefix=None):
-    with internal_lock(name):
-        LOG.debug(_('Attempting to grab external lock "%(lock)s"'),
-                  {'lock': name})
+def _get_lock_path(name, lock_file_prefix, lock_path=None):
+    # NOTE(mikal): the lock name cannot contain directory
+    # separators
+    name = name.replace(os.sep, '_')
+    if lock_file_prefix:
+        sep = '' if lock_file_prefix.endswith('-') else '-'
+        name = '%s%s%s' % (lock_file_prefix, sep, name)
+
+    local_lock_path = lock_path or CONF.lock_path
+
+    if not local_lock_path:
+        # NOTE(bnemec): Create a fake lock path for posix locks so we don't
+        # unnecessarily raise the RequiredOptError below.
+        if InterProcessLock is not _PosixLock:
+            raise cfg.RequiredOptError('lock_path')
+        local_lock_path = 'posixlock:/'
 
-        # NOTE(mikal): the lock name cannot contain directory
-        # separators
-        name = name.replace(os.sep, '_')
-        if lock_file_prefix:
-            sep = '' if lock_file_prefix.endswith('-') else '-'
-            name = '%s%s%s' % (lock_file_prefix, sep, name)
+    return os.path.join(local_lock_path, name)
 
-        if not CONF.lock_path:
-            raise cfg.RequiredOptError('lock_path')
 
-        lock_file_path = os.path.join(CONF.lock_path, name)
+def external_lock(name, lock_file_prefix=None, lock_path=None):
+    LOG.debug('Attempting to grab external lock "%(lock)s"',
+              {'lock': name})
 
-        return InterProcessLock(lock_file_path)
+    lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
+
+    # NOTE(bnemec): If an explicit lock_path was passed to us then it
+    # means the caller is relying on file-based locking behavior, so
+    # we can't use posix locks for those calls.
+    if lock_path:
+        return FileLock(lock_file_path)
+    return InterProcessLock(lock_file_path)
+
+
+def remove_external_lock_file(name, lock_file_prefix=None):
+    """Remove an external lock file when it's not used anymore.
+    This will be helpful when we have a lot of lock files.
+    """
+    with internal_lock(name):
+        lock_file_path = _get_lock_path(name, lock_file_prefix)
+        try:
+            os.remove(lock_file_path)
+        except OSError:
+            LOG.info(_LI('Failed to remove file %(file)s'),
+                     {'file': lock_file_path})
 
 
 def internal_lock(name):
@@ -184,12 +259,12 @@
             sem = threading.Semaphore()
             _semaphores[name] = sem
 
-    LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})
+    LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
     return sem
 
 
 @contextlib.contextmanager
-def lock(name, lock_file_prefix=None, external=False):
+def lock(name, lock_file_prefix=None, external=False, lock_path=None):
     """Context based lock
 
     This function yields a `threading.Semaphore` instance (if we don't use
@@ -201,18 +276,21 @@
 
     :param external: The external keyword argument denotes whether this lock
       should work across multiple processes. This means that if two different
-      workers both run a a method decorated with @synchronized('mylock',
+      workers both run a method decorated with @synchronized('mylock',
       external=True), only one of them will execute at a time.
     """
-    if external and not CONF.disable_process_locking:
-        lock = external_lock(name, lock_file_prefix)
-    else:
-        lock = internal_lock(name)
-    with lock:
-        yield lock
+    int_lock = internal_lock(name)
+    with int_lock:
+        if external and not CONF.disable_process_locking:
+            ext_lock = external_lock(name, lock_file_prefix, lock_path)
+            with ext_lock:
+                yield ext_lock
+        else:
+            yield int_lock
+    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
 
 
-def synchronized(name, lock_file_prefix=None, external=False):
+def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
     """Synchronization decorator.
 
     Decorating a method like so::
@@ -240,12 +318,12 @@
         @functools.wraps(f)
         def inner(*args, **kwargs):
             try:
-                with lock(name, lock_file_prefix, external):
-                    LOG.debug(_('Got semaphore / lock "%(function)s"'),
+                with lock(name, lock_file_prefix, external, lock_path):
+                    LOG.debug('Got semaphore / lock "%(function)s"',
                               {'function': f.__name__})
                     return f(*args, **kwargs)
             finally:
-                LOG.debug(_('Semaphore / lock released "%(function)s"'),
+                LOG.debug('Semaphore / lock released "%(function)s"',
                           {'function': f.__name__})
         return inner
     return wrap
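The lockutils.py hunks above split the file-based locks (_FileLock/_FcntlLock)
from the new POSIX-semaphore InterProcessLock and thread the new lock_path
argument through lock() and synchronized(); passing an explicit lock_path
keeps the old file-based behaviour. A usage sketch (the lock name and the
/var/lib/pycadf path are made up):

    from pycadf.openstack.common import lockutils

    @lockutils.synchronized('audit-map', lock_file_prefix='pycadf-',
                            external=True, lock_path='/var/lib/pycadf')
    def rebuild_audit_map():
        pass   # only one process at a time runs this

    # or as a context manager
    with lockutils.lock('audit-map', external=True,
                        lock_path='/var/lib/pycadf'):
        pass
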
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf/openstack/common/log.py 
new/pycadf-0.5.1/pycadf/openstack/common/log.py
--- old/pycadf-0.5/pycadf/openstack/common/log.py       2014-04-01 
21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/pycadf/openstack/common/log.py     2014-05-26 
17:40:11.000000000 +0200
@@ -15,7 +15,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
-"""Openstack logging handler.
+"""OpenStack logging handler.
 
 This module adds to logging functionality by adding the option to specify
 a context object when calling the various log methods.  If the context object
@@ -59,7 +59,10 @@
 _FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
                     r'(<%(key)s>).*?(</%(key)s>)',
                     r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
-                    r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])']
+                    r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
+                    r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
+                    '.*?([\'"])',
+                    r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']
 
 for key in _SANITIZE_KEYS:
     for pattern in _FORMAT_PATTERNS:
@@ -84,12 +87,10 @@
     cfg.StrOpt('log-config-append',
                metavar='PATH',
                deprecated_name='log-config',
-               help='The name of logging configuration file. It does not '
-                    'disable existing loggers, but just appends specified '
-                    'logging configuration to any other existing logging '
-                    'options. Please see the Python logging module '
-                    'documentation for details on logging configuration '
-                    'files.'),
+               help='The name of a logging configuration file. This file '
+                    'is appended to any existing logging configuration '
+                    'files. For details about logging configuration files, '
+                    'see the Python logging module documentation.'),
     cfg.StrOpt('log-format',
                default=None,
                metavar='FORMAT',
@@ -103,7 +104,7 @@
                default=_DEFAULT_LOG_DATE_FORMAT,
                metavar='DATE_FORMAT',
                help='Format string for %%(asctime)s in log records. '
-                    'Default: %(default)s'),
+                    'Default: %(default)s .'),
     cfg.StrOpt('log-file',
                metavar='PATH',
                deprecated_name='logfile',
@@ -112,30 +113,30 @@
     cfg.StrOpt('log-dir',
                deprecated_name='logdir',
                help='(Optional) The base directory used for relative '
-                    '--log-file paths'),
+                    '--log-file paths.'),
     cfg.BoolOpt('use-syslog',
                 default=False,
                 help='Use syslog for logging. '
                      'Existing syslog format is DEPRECATED during I, '
-                     'and then will be changed in J to honor RFC5424'),
+                     'and will change in J to honor RFC5424.'),
     cfg.BoolOpt('use-syslog-rfc-format',
                 # TODO(bogdando) remove or use True after existing
                 #    syslog format deprecation in J
                 default=False,
-                help='(Optional) Use syslog rfc5424 format for logging. '
-                     'If enabled, will add APP-NAME (RFC5424) before the '
-                     'MSG part of the syslog message.  The old format '
-                     'without APP-NAME is deprecated in I, '
+                help='(Optional) Enables or disables syslog rfc5424 format '
+                     'for logging. If enabled, prefixes the MSG part of the '
+                     'syslog message with APP-NAME (RFC5424). The '
+                     'format without the APP-NAME is deprecated in I, '
                      'and will be removed in J.'),
     cfg.StrOpt('syslog-log-facility',
                default='LOG_USER',
-               help='Syslog facility to receive log lines')
+               help='Syslog facility to receive log lines.')
 ]
 
 generic_log_opts = [
     cfg.BoolOpt('use_stderr',
                 default=True,
-                help='Log output to standard error')
+                help='Log output to standard error.')
 ]
 
 log_opts = [
@@ -143,18 +144,18 @@
                default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                        '%(name)s [%(request_id)s %(user_identity)s] '
                        '%(instance)s%(message)s',
-               help='Format string to use for log messages with context'),
+               help='Format string to use for log messages with context.'),
     cfg.StrOpt('logging_default_format_string',
                default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                        '%(name)s [-] %(instance)s%(message)s',
-               help='Format string to use for log messages without context'),
+               help='Format string to use for log messages without context.'),
     cfg.StrOpt('logging_debug_format_suffix',
                default='%(funcName)s %(pathname)s:%(lineno)d',
-               help='Data to append to log format when level is DEBUG'),
+               help='Data to append to log format when level is DEBUG.'),
     cfg.StrOpt('logging_exception_prefix',
                default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                '%(instance)s',
-               help='Prefix each line of exception output with this format'),
+               help='Prefix each line of exception output with this format.'),
     cfg.ListOpt('default_log_levels',
                 default=[
                     'amqp=WARN',
@@ -163,28 +164,29 @@
                     'qpid=WARN',
                     'sqlalchemy=WARN',
                     'suds=INFO',
+                    'oslo.messaging=INFO',
                     'iso8601=WARN',
                     'requests.packages.urllib3.connectionpool=WARN'
                 ],
-                help='List of logger=LEVEL pairs'),
+                help='List of logger=LEVEL pairs.'),
     cfg.BoolOpt('publish_errors',
                 default=False,
-                help='Publish error events'),
+                help='Enables or disables publication of error events.'),
     cfg.BoolOpt('fatal_deprecations',
                 default=False,
-                help='Make deprecations fatal'),
+                help='Enables or disables fatal status of deprecations.'),
 
     # NOTE(mikal): there are two options here because sometimes we are handed
     # a full instance (and could include more information), and other times we
     # are just handed a UUID for the instance.
     cfg.StrOpt('instance_format',
                default='[instance: %(uuid)s] ',
-               help='If an instance is passed with the log message, format '
-                    'it like this'),
+               help='The format for an instance that is passed with the log '
+                    'message. '),
     cfg.StrOpt('instance_uuid_format',
                default='[instance: %(uuid)s] ',
-               help='If an instance UUID is passed with the log message, '
-                    'format it like this'),
+               help='The format for an instance UUID that is passed with the '
+                    'log message. '),
 ]
 
 CONF = cfg.CONF
@@ -357,7 +359,7 @@
             extra.update(_dictify_context(context))
 
         instance = kwargs.pop('instance', None)
-        instance_uuid = (extra.get('instance_uuid', None) or
+        instance_uuid = (extra.get('instance_uuid') or
                          kwargs.pop('instance_uuid', None))
         instance_extra = ''
         if instance:
@@ -450,7 +452,7 @@
         logging.config.fileConfig(log_config_append,
                                   disable_existing_loggers=False)
     except moves.configparser.Error as exc:
-        raise LogConfigError(log_config_append, str(exc))
+        raise LogConfigError(log_config_append, six.text_type(exc))
 
 
 def setup(product_name, version='unknown'):
@@ -495,10 +497,16 @@
 class RFCSysLogHandler(logging.handlers.SysLogHandler):
     def __init__(self, *args, **kwargs):
         self.binary_name = _get_binary_name()
-        super(RFCSysLogHandler, self).__init__(*args, **kwargs)
+        # Do not use super() unless type(logging.handlers.SysLogHandler)
+        #  is 'type' (Python 2.7).
+        # Use old style calls, if the type is 'classobj' (Python 2.6)
+        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
 
     def format(self, record):
-        msg = super(RFCSysLogHandler, self).format(record)
+        # Do not use super() unless type(logging.handlers.SysLogHandler)
+        #  is 'type' (Python 2.7).
+        # Use old style calls, if the type is 'classobj' (Python 2.6)
+        msg = logging.handlers.SysLogHandler.format(self, record)
         msg = self.binary_name + ' ' + msg
         return msg
 
@@ -650,11 +658,11 @@
         # NOTE(sdague): default the fancier formatting params
         # to an empty string so we don't throw an exception if
         # they get used
-        for key in ('instance', 'color'):
+        for key in ('instance', 'color', 'user_identity'):
             if key not in record.__dict__:
                 record.__dict__[key] = ''
 
-        if record.__dict__.get('request_id', None):
+        if record.__dict__.get('request_id'):
             self._fmt = CONF.logging_context_format_string
         else:
             self._fmt = CONF.logging_default_format_string
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf.egg-info/PKG-INFO 
new/pycadf-0.5.1/pycadf.egg-info/PKG-INFO
--- old/pycadf-0.5/pycadf.egg-info/PKG-INFO     2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/pycadf.egg-info/PKG-INFO   2014-05-26 17:40:57.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: pycadf
-Version: 0.5
+Version: 0.5.1
 Summary: CADF Library
 Home-page: https://launchpad.net/pycadf
 Author: OpenStack
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf.egg-info/SOURCES.txt 
new/pycadf-0.5.1/pycadf.egg-info/SOURCES.txt
--- old/pycadf-0.5/pycadf.egg-info/SOURCES.txt  2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/pycadf.egg-info/SOURCES.txt        2014-05-26 17:40:57.000000000 +0200
@@ -10,12 +10,14 @@
 test-requirements.txt
 tox.ini
 doc/Makefile
+doc/ext/__init__.py
+doc/ext/apidoc.py
 doc/source/conf.py
+doc/source/contributing.rst
 doc/source/event_concept.rst
 doc/source/index.rst
 doc/source/middleware.rst
 doc/source/_templates/.placeholder
-doc/source/api/index.rst
 doc/source/images/audit_event.png
 doc/source/images/middleware.png
 doc/source/images/monitor_event.png
@@ -94,4 +96,5 @@
 pycadf/tests/audit/__init__.py
 pycadf/tests/audit/test_api.py
 pycadf/tests/middleware/__init__.py
-pycadf/tests/middleware/test_audit.py
\ No newline at end of file
+pycadf/tests/middleware/test_audit.py
+tools/run_cross_tests.sh
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/pycadf.egg-info/requires.txt 
new/pycadf-0.5.1/pycadf.egg-info/requires.txt
--- old/pycadf-0.5/pycadf.egg-info/requires.txt 2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/pycadf.egg-info/requires.txt       2014-05-26 17:40:57.000000000 +0200
@@ -2,7 +2,8 @@
 iso8601>=0.1.9
 netaddr>=0.7.6
 oslo.config>=1.2.0
-oslo.messaging>=1.3.0a9
+oslo.messaging>=1.3.0
+posix_ipc
 pytz>=2010h
-six>=1.5.2
+six>=1.6.0
 WebOb>=1.2.3
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/requirements.txt 
new/pycadf-0.5.1/requirements.txt
--- old/pycadf-0.5/requirements.txt     2014-04-01 21:31:09.000000000 +0200
+++ new/pycadf-0.5.1/requirements.txt   2014-05-26 17:40:11.000000000 +0200
@@ -2,7 +2,8 @@
 iso8601>=0.1.9
 netaddr>=0.7.6
 oslo.config>=1.2.0
-oslo.messaging>=1.3.0a9
+oslo.messaging>=1.3.0
+posix_ipc
 pytz>=2010h
-six>=1.5.2
+six>=1.6.0
 WebOb>=1.2.3
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/setup.cfg new/pycadf-0.5.1/setup.cfg
--- old/pycadf-0.5/setup.cfg    2014-04-01 21:31:57.000000000 +0200
+++ new/pycadf-0.5.1/setup.cfg  2014-05-26 17:40:57.000000000 +0200
@@ -36,6 +36,9 @@
 [upload_sphinx]
 upload-dir = doc/build/html
 
+[pbr]
+warnerrors = True
+
 [egg_info]
 tag_build = 
 tag_date = 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/setup.py new/pycadf-0.5.1/setup.py
--- old/pycadf-0.5/setup.py     2014-04-01 21:31:08.000000000 +0200
+++ new/pycadf-0.5.1/setup.py   2014-05-26 17:40:11.000000000 +0200
@@ -17,6 +17,14 @@
 # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
 import setuptools
 
+# In python < 2.7.4, a lazy loading of package `pbr` will break
+# setuptools if some other modules registered functions in `atexit`.
+# solution from: http://bugs.python.org/issue15881#msg170215
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
+
 setuptools.setup(
     setup_requires=['pbr'],
     pbr=True)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/pycadf-0.5/tools/run_cross_tests.sh 
new/pycadf-0.5.1/tools/run_cross_tests.sh
--- old/pycadf-0.5/tools/run_cross_tests.sh     1970-01-01 01:00:00.000000000 +0100
+++ new/pycadf-0.5.1/tools/run_cross_tests.sh   2014-05-26 17:40:11.000000000 +0200
@@ -0,0 +1,91 @@
+#!/bin/bash
+#
+# Run cross-project tests
+#
+# Usage:
+#
+#   run_cross_tests.sh project_dir venv
+
+# Fail the build if any command fails
+set -e
+
+project_dir="$1"
+venv="$2"
+
+if [ -z "$project_dir" -o -z "$venv" ]
+then
+    cat - <<EOF
+ERROR: Missing argument(s)
+
+Usage:
+
+  $0 PROJECT_DIR VIRTUAL_ENV
+
+Example, run the python 2.7 tests for python-neutronclient:
+
+  $0 /opt/stack/python-neutronclient py27
+
+EOF
+    exit 1
+fi
+
+# Set up the virtualenv without running the tests
+(cd $project_dir && tox --notest -e $venv)
+
+tox_envbin=$project_dir/.tox/$venv/bin
+
+our_name=$(python setup.py --name)
+
+# Replace the pip-installed package with the version in our source
+# tree. Look to see if we are already installed before trying to
+# uninstall ourselves, to avoid failures from packages that do not use us
+# yet.
+if $tox_envbin/pip freeze | grep -q $our_name
+then
+    $tox_envbin/pip uninstall -y $our_name
+fi
+$tox_envbin/pip install -U .
+
+# Run the tests
+(cd $project_dir && tox -e $venv)
+result=$?
+
+
+# The below checks are modified from
+# openstack-infra/config/modules/jenkins/files/slave_scripts/run-unittests.sh.
+
+# They expect to be run in the project being tested.
+cd $project_dir
+
+echo "Begin pip freeze output from test virtualenv:"
+echo "======================================================================"
+.tox/$venv/bin/pip freeze
+echo "======================================================================"
+
+# We only want to run the next check if the tool is installed, so look
+# for it before continuing.
+if [ -f /usr/local/jenkins/slave_scripts/subunit2html.py -a -d 
".testrepository" ] ; then
+    if [ -f ".testrepository/0.2" ] ; then
+        cp .testrepository/0.2 ./subunit_log.txt
+    elif [ -f ".testrepository/0" ] ; then
+        .tox/$venv/bin/subunit-1to2 < .testrepository/0 > ./subunit_log.txt
+    fi
+    .tox/$venv/bin/python /usr/local/jenkins/slave_scripts/subunit2html.py 
./subunit_log.txt testr_results.html
+    gzip -9 ./subunit_log.txt
+    gzip -9 ./testr_results.html
+
+    export PYTHON=.tox/$venv/bin/python
+    set -e
+    rancount=$(.tox/$venv/bin/testr last | sed -ne 's/Ran \([0-9]\+\).*tests 
in.*/\1/p')
+    if [ "$rancount" -eq "0" ] ; then
+        echo
+        echo "Zero tests were run. At least one test should have been run."
+        echo "Failing this test as a result"
+        echo
+        exit 1
+    fi
+fi
+
+# If we make it this far, report status based on the tests that were
+# run.
+exit $result

-- 
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
