Example: run client/tests/sleeptest.
test.runsubtest("sleeptest", **args)
args is a dictionary with parameters for the subtest.
For sleeptest it looks like { "seconds":5 }.
These args tell sleeptest to sleep for 5 seconds.
This patch is necessary to avoid creating duplicate versions of tests
netperf, multicast, etc..
Patch tests_base.cfg.sample in the correct way.
Signed-off-by: Jiří Župka <[email protected]>
---
client/bin/client_logging_config.py | 5 +-
client/bin/net/net_utils.py | 16 ++++-
client/common_lib/base_job.py | 2 +
client/common_lib/logging_config.py | 3 +-
client/common_lib/test.py | 21 ++++++-
client/tests/kvm/tests/subtest.py | 19 +++++
client/tests/kvm/tests_base.cfg.sample | 6 ++
client/tests/netperf2/netperf2.py | 3 +-
client/tools/html_report.py | 115 ++++++++++++++++++--------------
client/virt/virt_test_utils.py | 19 ++++--
10 files changed, 148 insertions(+), 61 deletions(-)
create mode 100644 client/tests/kvm/tests/subtest.py
diff --git a/client/bin/client_logging_config.py
b/client/bin/client_logging_config.py
index a59b078..28c007d 100644
--- a/client/bin/client_logging_config.py
+++ b/client/bin/client_logging_config.py
@@ -12,8 +12,9 @@ class ClientLoggingConfig(logging_config.LoggingConfig):
def configure_logging(self, results_dir=None, verbose=False):
- super(ClientLoggingConfig, self).configure_logging(use_console=True,
- verbose=verbose)
+ super(ClientLoggingConfig, self).configure_logging(
+ use_console=self.use_console,
+ verbose=verbose)
if results_dir:
log_dir = os.path.join(results_dir, 'debug')
diff --git a/client/bin/net/net_utils.py b/client/bin/net/net_utils.py
index 868958c..ac9b494 100644
--- a/client/bin/net/net_utils.py
+++ b/client/bin/net/net_utils.py
@@ -5,7 +5,7 @@ This library is to release in the public repository.
import commands, os, re, socket, sys, time, struct
from autotest_lib.client.common_lib import error
-import utils
+from autotest_lib.client.common_lib import utils
TIMEOUT = 10 # Used for socket timeout and barrier timeout
@@ -27,6 +27,20 @@ class network_utils(object):
utils.system('/sbin/ifconfig -a')
+ def get_corespond_local_ip(self, query_ip, netmask="24"):
+ """
+        Get the IP address on the local system which can communicate with query_ip.
+
+        @param query_ip: IP of the client which wants to communicate with the
autotest machine.
+ @return: IP address which can communicate with query_ip
+ """
+ ip = utils.system_output("ip addr show to %s/%s" % (query_ip, netmask))
+ ip = re.search(r"inet ([0-9.]*)/",ip)
+ if ip is None:
+ return ip
+ return ip.group(1)
+
+
def disable_ip_local_loopback(self, ignore_status=False):
utils.system("echo '1' > /proc/sys/net/ipv4/route/no_local_loopback",
ignore_status=ignore_status)
diff --git a/client/common_lib/base_job.py b/client/common_lib/base_job.py
index 843c0e8..eef9efc 100644
--- a/client/common_lib/base_job.py
+++ b/client/common_lib/base_job.py
@@ -1117,6 +1117,7 @@ class base_job(object):
tag_parts = []
# build up the parts of the tag used for the test name
+ master_testpath = dargs.get('master_testpath', "")
base_tag = dargs.pop('tag', None)
if base_tag:
tag_parts.append(str(base_tag))
@@ -1132,6 +1133,7 @@ class base_job(object):
if subdir_tag:
tag_parts.append(subdir_tag)
subdir = '.'.join([testname] + tag_parts)
+ subdir = os.path.join(master_testpath, subdir)
tag = '.'.join(tag_parts)
return full_testname, subdir, tag
diff --git a/client/common_lib/logging_config.py
b/client/common_lib/logging_config.py
index afe754a..9114d7a 100644
--- a/client/common_lib/logging_config.py
+++ b/client/common_lib/logging_config.py
@@ -32,9 +32,10 @@ class LoggingConfig(object):
fmt='%(asctime)s %(levelname)-5.5s| %(message)s',
datefmt='%H:%M:%S')
- def __init__(self):
+ def __init__(self, use_console=True):
self.logger = logging.getLogger()
self.global_level = logging.DEBUG
+ self.use_console = use_console
@classmethod
diff --git a/client/common_lib/test.py b/client/common_lib/test.py
index c55d23b..b1a0904 100644
--- a/client/common_lib/test.py
+++ b/client/common_lib/test.py
@@ -465,6 +465,24 @@ class base_test(object):
self.job.enable_warnings("NETWORK")
+ def runsubtest(self, url, *args, **dargs):
+ """
+        Run a subtest from within a running test.
+
+ @param test: Parent test.
+ @param url: Url of new test.
+ @param tag: Tag added to test name.
+ @param args: Args for subtest.
+        @param dargs: Dictionary args for the subtest.
+        @iterations: Number of iterations of the subtest.
+        @profile_only: If True, do not profile.
+ """
+ dargs["profile_only"] = dargs.get("profile_only", True)
+ test_basepath = self.outputdir[len(self.job.resultdir + "/"):]
+ self.job.run_test(url, master_testpath=test_basepath,
+ *args, **dargs)
+
+
def _get_nonstar_args(func):
"""Extract all the (normal) function parameter names.
@@ -658,7 +676,8 @@ def runtest(job, url, tag, args, dargs,
if not bindir:
raise error.TestError(testname + ': test does not exist')
- outputdir = os.path.join(job.resultdir, testname)
+ subdir = os.path.join(dargs.pop('master_testpath', ""), testname)
+ outputdir = os.path.join(job.resultdir, subdir)
if tag:
outputdir += '.' + tag
diff --git a/client/tests/kvm/tests/subtest.py
b/client/tests/kvm/tests/subtest.py
new file mode 100644
index 0000000..c2baadc
--- /dev/null
+++ b/client/tests/kvm/tests/subtest.py
@@ -0,0 +1,19 @@
+import os, logging
+
+
+def run_subtest(test, params, env):
+ """
+    Run an autotest test inside a guest and a subtest on the host side.
+    This test is meant to substitute for the netperf test in kvm.
+
+ @param test: kvm test object.
+ @param params: Dictionary with test parameters.
+ @param env: Dictionary with the test environment.
+ """
+
+ # Collect test parameters
+ test_control_file = params.get("test_control_file")
+ args = eval(params.get("test_control_args"))
+
+ # Run subtest with args.
+ test.runsubtest(test_control_file, **args)
diff --git a/client/tests/kvm/tests_base.cfg.sample
b/client/tests/kvm/tests_base.cfg.sample
index 810a4bd..c7c05a5 100644
--- a/client/tests/kvm/tests_base.cfg.sample
+++ b/client/tests/kvm/tests_base.cfg.sample
@@ -261,6 +261,12 @@ variants:
- systemtap:
test_control_file = systemtap.control
+ - subtest: install setup unattended_install.cdrom
+ type = subtest
+ test_timeout = 1800
+ test_control_file = sleeptest
+ test_control_args = {"seconds": 5}
+
- linux_s3: install setup unattended_install.cdrom
only Linux
type = linux_s3
diff --git a/client/tests/netperf2/netperf2.py
b/client/tests/netperf2/netperf2.py
index 1b659dd..23d25c5 100644
--- a/client/tests/netperf2/netperf2.py
+++ b/client/tests/netperf2/netperf2.py
@@ -2,6 +2,7 @@ import os, time, re, logging
from autotest_lib.client.bin import test, utils
from autotest_lib.client.bin.net import net_utils
from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import barrier
MPSTAT_IX = 0
NETPERF_IX = 1
@@ -36,7 +37,7 @@ class netperf2(test.test):
def run_once(self, server_ip, client_ip, role, test = 'TCP_STREAM',
test_time = 15, stream_list = [1], test_specific_args = '',
- cpu_affinity = '', dev = '', bidi = False, wait_time = 5):
+ cpu_affinity = '', dev = '', bidi = False, wait_time = 2):
"""
server_ip: IP address of host running netserver
client_ip: IP address of host running netperf client(s)
diff --git a/client/tools/html_report.py b/client/tools/html_report.py
index 7b17a75..563a7a9 100755
--- a/client/tools/html_report.py
+++ b/client/tools/html_report.py
@@ -1372,7 +1372,7 @@ function processList(ul) {
}
"""
-stimelist = []
+
def make_html_file(metadata, results, tag, host, output_file_name, dirname):
@@ -1430,11 +1430,12 @@ return true;
total_failed = 0
total_passed = 0
for res in results:
- total_executed += 1
- if res['status'] == 'GOOD':
- total_passed += 1
- else:
- total_failed += 1
+ if results[res][2] != None:
+ total_executed += 1
+ if results[res][2]['status'] == 'GOOD':
+ total_passed += 1
+ else:
+ total_failed += 1
stat_str = 'No test cases executed'
if total_executed > 0:
failed_perct = int(float(total_failed)/float(total_executed)*100)
@@ -1471,39 +1472,46 @@ id="t1" class="stats table-autosort:4 table-autofilter
table-stripeclass:alterna
<tbody>
"""
print >> output, result_table_prefix
- for res in results:
- print >> output, '<tr>'
- print >> output, '<td align="left">%s</td>' % res['time']
- print >> output, '<td align="left">%s</td>' % res['testcase']
- if res['status'] == 'GOOD':
- print >> output, '<td align=\"left\"><b><font
color="#00CC00">PASS</font></b></td>'
- elif res['status'] == 'FAIL':
- print >> output, '<td align=\"left\"><b><font
color="red">FAIL</font></b></td>'
- elif res['status'] == 'ERROR':
- print >> output, '<td align=\"left\"><b><font
color="red">ERROR!</font></b></td>'
- else:
- print >> output, '<td align=\"left\">%s</td>' % res['status']
- # print exec time (seconds)
- print >> output, '<td align="left">%s</td>' % res['exec_time_sec']
- # print log only if test failed..
- if res['log']:
- #chop all '\n' from log text (to prevent html errors)
- rx1 = re.compile('(\s+)')
- log_text = rx1.sub(' ', res['log'])
-
- # allow only a-zA-Z0-9_ in html title name
- # (due to bug in MS-explorer)
- rx2 = re.compile('([^a-zA-Z_0-9])')
- updated_tag = rx2.sub('_', res['title'])
-
- html_body_text =
'<html><head><title>%s</title></head><body>%s</body></html>' %
(str(updated_tag), log_text)
- print >> output, '<td align=\"left\"><A HREF=\"#\"
onClick=\"popup(\'%s\',\'%s\')\">Info</A></td>' % (str(updated_tag),
str(html_body_text))
- else:
- print >> output, '<td align=\"left\"></td>'
- # print execution time
- print >> output, '<td align="left"><A HREF=\"%s\">Debug</A></td>' %
os.path.join(dirname, res['title'], "debug")
+ def print_result(result, indent):
+ while result != []:
+ r = result.pop(0)
+ print r
+ res = results[r][2]
+ print >> output, '<tr>'
+ print >> output, '<td align="left">%s</td>' % res['time']
+ print >> output, '<td align="left"
style="padding-left:%dpx">%s</td>' % (indent * 20, res['title'])
+ if res['status'] == 'GOOD':
+ print >> output, '<td align=\"left\"><b><font
color="#00CC00">PASS</font></b></td>'
+ elif res['status'] == 'FAIL':
+ print >> output, '<td align=\"left\"><b><font
color="red">FAIL</font></b></td>'
+ elif res['status'] == 'ERROR':
+ print >> output, '<td align=\"left\"><b><font
color="red">ERROR!</font></b></td>'
+ else:
+ print >> output, '<td align=\"left\">%s</td>' % res['status']
+ # print exec time (seconds)
+ print >> output, '<td align="left">%s</td>' % res['exec_time_sec']
+ # print log only if test failed..
+ if res['log']:
+ #chop all '\n' from log text (to prevent html errors)
+ rx1 = re.compile('(\s+)')
+ log_text = rx1.sub(' ', res['log'])
+
+ # allow only a-zA-Z0-9_ in html title name
+ # (due to bug in MS-explorer)
+ rx2 = re.compile('([^a-zA-Z_0-9])')
+ updated_tag = rx2.sub('_', res['title'])
+
+ html_body_text =
'<html><head><title>%s</title></head><body>%s</body></html>' %
(str(updated_tag), log_text)
+ print >> output, '<td align=\"left\"><A HREF=\"#\"
onClick=\"popup(\'%s\',\'%s\')\">Info</A></td>' % (str(updated_tag),
str(html_body_text))
+ else:
+ print >> output, '<td align=\"left\"></td>'
+ # print execution time
+ print >> output, '<td align="left"><A HREF=\"%s\">Debug</A></td>'
% os.path.join(dirname, res['subdir'], "debug")
- print >> output, '</tr>'
+ print >> output, '</tr>'
+ print_result(results[r][1], indent + 1)
+
+ print_result(results[""][1], 0)
print >> output, "</tbody></table>"
@@ -1531,21 +1539,27 @@ id="t1" class="stats table-autosort:4 table-autofilter
table-stripeclass:alterna
output.close()
-def parse_result(dirname, line):
+def parse_result(dirname, line, results_data):
"""
Parse job status log line.
@param dirname: Job results dir
@param line: Status log line.
+    @param results_data: Dictionary for the results.
"""
parts = line.split()
if len(parts) < 4:
return None
- global stimelist
+ global tests
if parts[0] == 'START':
pair = parts[3].split('=')
stime = int(pair[1])
- stimelist.append(stime)
+ results_data[parts[1]] = [stime, [], None]
+ try:
+ parent_test = re.findall(r".*/", parts[1])[0][:-1]
+ results_data[parent_test][1].append(parts[1])
+ except IndexError:
+ results_data[""][1].append(parts[1])
elif (parts[0] == 'END'):
result = {}
@@ -1562,21 +1576,25 @@ def parse_result(dirname, line):
result['exec_time_sec'] = 'na'
tag = parts[3]
+ result['subdir'] = parts[2]
# assign actual values
rx = re.compile('^(\w+)\.(.*)$')
m1 = rx.findall(parts[3])
- result['testcase'] = str(tag)
+ if len(m1):
+ result['testcase'] = m1[0][1]
+ else:
+ result['testcase'] = parts[3]
result['title'] = str(tag)
result['status'] = parts[1]
if result['status'] != 'GOOD':
result['log'] = get_exec_log(dirname, tag)
- if len(stimelist)>0:
+ if len(results_data)>0:
pair = parts[4].split('=')
etime = int(pair[1])
- stime = stimelist.pop()
+ stime = results_data[parts[2]][0]
total_exec_time_sec = etime - stime
result['exec_time_sec'] = total_exec_time_sec
- return result
+ results_data[parts[2]][2] = result
return None
@@ -1699,16 +1717,15 @@ def create_report(dirname, html_path='',
output_file_name=None):
host = get_info_file(os.path.join(sysinfo_dir, 'hostname'))
rx = re.compile('^\s+[END|START].*$')
# create the results set dict
- results_data = []
+ results_data = {}
+ results_data[""] = [0, [], None]
if os.path.exists(status_file_name):
f = open(status_file_name, "r")
lines = f.readlines()
f.close()
for line in lines:
if rx.match(line):
- result_dict = parse_result(dirname, line)
- if result_dict:
- results_data.append(result_dict)
+ parse_result(dirname, line, results_data)
# create the meta info dict
metalist = {
'uname': get_info_file(os.path.join(sysinfo_dir, 'uname')),
diff --git a/client/virt/virt_test_utils.py b/client/virt/virt_test_utils.py
index e3a18d2..556d3e5 100644
--- a/client/virt/virt_test_utils.py
+++ b/client/virt/virt_test_utils.py
@@ -430,13 +430,15 @@ def get_memory_info(lvms):
return meminfo
-def run_autotest(vm, session, control_path, timeout, outputdir, params):
+def run_autotest(vm, session, control_path, control_args, timeout, outputdir,
+ params):
"""
Run an autotest control file inside a guest (linux only utility).
@param vm: VM object.
@param session: A shell session on the VM provided.
@param control_path: A path to an autotest control file.
+    @param control_args: Arguments for the control file.
@param timeout: Timeout under which the autotest control file must
complete.
@param outputdir: Path on host where we should copy the guest autotest
results to.
@@ -561,6 +563,10 @@ def run_autotest(vm, session, control_path, timeout,
outputdir, params):
pass
try:
bg = None
+ if control_args != None:
+ control_args = ' -a "' + control_args + '"'
+ else:
+ control_args = ""
try:
logging.info("---------------- Test output ----------------")
if migrate_background:
@@ -568,7 +574,8 @@ def run_autotest(vm, session, control_path, timeout,
outputdir, params):
mig_protocol = params.get("migration_protocol", "tcp")
bg = virt_utils.Thread(session.cmd_output,
- kwargs={'cmd': "bin/autotest control",
+ kwargs={'cmd': "bin/autotest control" +
+ control_args,
'timeout': timeout,
'print_func': logging.info})
@@ -579,8 +586,8 @@ def run_autotest(vm, session, control_path, timeout,
outputdir, params):
"migration ...")
vm.migrate(timeout=mig_timeout, protocol=mig_protocol)
else:
- session.cmd_output("bin/autotest control", timeout=timeout,
- print_func=logging.info)
+ session.cmd_output("bin/autotest control" + control_args,
+ timeout=timeout, print_func=logging.info)
finally:
logging.info("------------- End of test output ------------")
if migrate_background and bg:
@@ -624,8 +631,8 @@ def run_autotest(vm, session, control_path, timeout,
outputdir, params):
def get_loss_ratio(output):
"""
- Get the packet loss ratio from the output of ping
-.
+ Get the packet loss ratio from the output of ping.
+
@param output: Ping output.
"""
try:
--
1.7.4.4
--
To unsubscribe from this list: send the line "unsubscribe kvm" in
the body of a message to [email protected]
More majordomo info at http://vger.kernel.org/majordomo-info.html