On 28/08/14 23:35, Dylan Baker wrote:
This adds a mostly compliant JUnit backend. It currently passes all of
its tests, and Jenkins happily consumes the XML we produce.

This still needs some refactoring in profile to make it 100% compliant
with the junit-7.xsd from upstream Jenkins (although I would be
comfortable pushing it for now with the known non-optimal behavior if
Jenkins will accept it): JUnit expects to be given the number of tests
in the initial metadata block, but we have no way to calculate that
number until after the run has started, because the flattening pass in
profile only collapses the nested directory structure into a flat
dictionary at that point.

There are two options to solve this problem:
1) Flatten all.py and the other modules. This is a lot of work, and I
    have many work-in-progress branches to do just that.
2) Push the pass out to a public method and call it ahead of time. This
    seems really hacky to me, and I'd rather not do something that ugly.

Currently this patch just passes 0 for the test count unconditionally;
Jenkins does not seem to have a problem with this.
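
For reference, option 2 would amount to roughly the following in run.py
(an untested sketch; prepare_test_list() is a hypothetical name for the
exposed flattening pass, not something profile currently provides, and
it assumes the flattened dict ends up in profile.test_list):

    # Hypothetical: expose profile's flattening pass so it can be run
    # before the backend needs the count.
    profile.prepare_test_list()
    options['test_count'] = len(profile.test_list)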

This also includes junit-7.xsd from the Jenkins svn repository; it is
only used by the piglit python framework unit tests.
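
That schema lets the unit tests validate the generated XML along these
lines (illustrative sketch only, not the actual test code):

    from lxml import etree

    # Load the bundled schema and check the backend's output against it.
    schema = etree.XMLSchema(etree.parse('framework/tests/schema/junit-7.xsd'))
    junit_xml = etree.parse('results.xml')
    assert schema.validate(junit_xml)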

Signed-off-by: Dylan Baker <[email protected]>
---
  framework/programs/run.py          |   4 ++
  framework/results.py               |  65 ++++++++++++++++++++++-
  framework/tests/results_tests.py   |  91 ++++++++++++++++++++++++++++++++
  framework/tests/schema/junit-7.xsd | 104 +++++++++++++++++++++++++++++++++++++
  framework/tests/utils.py           |  22 ++++++++
  5 files changed, 285 insertions(+), 1 deletion(-)
  create mode 100644 framework/tests/schema/junit-7.xsd

diff --git a/framework/programs/run.py b/framework/programs/run.py
index 8b7045d..82fc797 100644
--- a/framework/programs/run.py
+++ b/framework/programs/run.py
@@ -221,6 +221,10 @@ def run(input_):
          options['platform'] = args.platform
      options['name'] = results.name
      options['env'] = core.collect_system_info()
+    # FIXME: this should be the actual count, but profile needs to be
+    # refactored to make that possible because of the flattening pass that is
+    # part of profile.run
+    options['test_count'] = 0

I don't think this attribute is actually necessary in practice. Jenkins
counts the tests itself when it processes the JUnit XML and stores the
results internally.



      # Begin json.
      backend = framework.results.get_backend(args.backend)(
diff --git a/framework/results.py b/framework/results.py
index 741adc9..eabeb4d 100644
--- a/framework/results.py
+++ b/framework/results.py
@@ -26,11 +26,16 @@ import os
  import sys
  import abc
  import threading
+import posixpath
  from cStringIO import StringIO
  try:
      import simplejson as json
  except ImportError:
      import json
+try:
+    from lxml import etree
+except ImportError:
+    import xml.etree.cElementTree as etree

  import framework.status as status

@@ -43,7 +48,7 @@ __all__ = [
  ]

  # A list of available backends
-BACKENDS = ['json']
+BACKENDS = ['json', 'junit']

  # The current version of the JSON results
  CURRENT_JSON_VERSION = 1
@@ -349,6 +354,63 @@ class JSONBackend(Backend):
              self._write_dict_item(name, data)


+class JUnitBackend(Backend):
+    """ Backend that produces ANT JUnit XML
+
+    Based on the following schema:
+    https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-7.xsd
+
+    """
+    # TODO: add fsync support
+
+    def __init__(self, dest, metadata, **options):
+        self._file = open(os.path.join(dest, 'results.xml'), 'w')
+
+        # Write initial headers and other data that etree cannot write for us
+        self._file.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
+        self._file.write('<testsuites>\n')
+        self._file.write(
+            '<testsuite name="piglit" tests="{}">\n'.format(
+                metadata['test_count']))
+
+    def finalize(self, metadata=None):
+        self._file.write('</testsuite>\n')
+        self._file.write('</testsuites>\n')
+        self._file.close()

I'm not sure whether there is one already, but you might want to add a "try: ... finally:" construct at the top level to ensure that the XML is always valid, even if something bad happens halfway through.
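
Something along these lines, for instance (untested sketch; the exact
run call and backend arguments are elided):

    backend = framework.results.get_backend(args.backend)(...)
    try:
        profile.run(...)        # however the run is actually invoked
    finally:
        backend.finalize()      # always close </testsuite>/</testsuites>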

+
+    def write_test(self, name, data):
+        # Split the name of the test and the group (what junit refers to as
+        # classname), and replace piglit's '/'-separated groups with '.', after
+        # replacing any '.' with '_' (so we don't get false groups)
+        classname, testname = posixpath.split(name)
+        assert classname
+        assert testname
+        classname = classname.replace('.', '_').replace('/', '.')

We might need to replace('\\', '.') too, for Windows' sake.
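
E.g. something like (untested):

    classname = classname.replace('.', '_')
    classname = classname.replace('\\', '.').replace('/', '.')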

+        element = etree.Element('testcase', name=testname, classname=classname,
+                                time=str(data['time']),
+                                status=str(data['result']))
+
+        # Add stdout
+        out = etree.SubElement(element, 'system-out')
+        out.text = data['out']
+
+        # Add stderr
+        err = etree.SubElement(element, 'system-err')
+        err.text = data['err']
+
+        # Add relavent result value, if the result is pass then it doesn't need

Typo: "relevant".

+        # one of these statuses
+        if data['result'] == 'skip':
+            etree.SubElement(element, 'skipped')
+        elif data['result'] in ['warn', 'fail', 'dmesg-warn', 'dmesg-fail']:
+            etree.SubElement(element, 'failure')
[...]

Otherwise looks good.

Jose
