Signed-off-by: Cleber Rosa <[email protected]>
---
client/autotest_local.py | 9 +++++----
client/base_sysinfo.py | 8 +++++---
client/client_logging_config.py | 4 ++--
client/job.py | 9 +++++----
client/job_unittest.py | 9 +++++----
client/shared/base_packages.py | 4 ++--
client/shared/global_config.py | 22 +++++++++++++++++-----
client/shared/hosts/base_classes.py | 8 ++++----
frontend/afe/models.py | 6 +++---
frontend/afe/rpcserver_logging.py | 3 ++-
frontend/settings.py | 7 ++++---
scheduler/drone_manager.py | 9 +++++----
scheduler/drones.py | 5 +++--
scheduler/monitor_db.py | 9 +++++----
scheduler/monitor_db_functional_unittest.py | 2 +-
scheduler/scheduler_config.py | 2 +-
server/autotest_remote.py | 29 +++++++++++++++++------------
server/autotest_remote_unittest.py | 12 ++++++------
server/crashcollect.py | 2 +-
server/hosts/abstract_ssh.py | 4 ++--
server/hosts/factory.py | 2 +-
server/hosts/paramiko_host.py | 2 +-
tko/db.py | 10 +++++-----
23 files changed, 102 insertions(+), 75 deletions(-)
diff --git a/client/autotest_local.py b/client/autotest_local.py
index c3d3315..1b9aab3 100644
--- a/client/autotest_local.py
+++ b/client/autotest_local.py
@@ -55,10 +55,11 @@ class AutotestLocalApp:
def main(self):
self.parse_cmdline()
- drop_caches = global_config.global_config.get_config_value('CLIENT',
-
'drop_caches',
- type=bool,
-
default=True)
+ drop_caches = global_config.global_config.get_config_value(
+ 'CLIENT',
+ 'drop_caches',
+ value_type=bool,
+ default=True)
if self.options.client_test_setup:
from autotest.client import setup_job
diff --git a/client/base_sysinfo.py b/client/base_sysinfo.py
index 9029c77..f187f78 100644
--- a/client/base_sysinfo.py
+++ b/client/base_sysinfo.py
@@ -5,9 +5,11 @@ from autotest.client import utils
GLOBAL_CONFIG = global_config.global_config
-_LOG_INSTALLED_PACKAGES = GLOBAL_CONFIG.get_config_value('CLIENT',
-
'log_installed_packages',
- type=bool,
default=False)
+_LOG_INSTALLED_PACKAGES = GLOBAL_CONFIG.get_config_value(
+ 'CLIENT',
+ 'log_installed_packages',
+ value_type=bool,
+ default=False)
_DEFAULT_COMMANDS_TO_LOG_PER_TEST = []
_DEFAULT_COMMANDS_TO_LOG_PER_BOOT = [
diff --git a/client/client_logging_config.py b/client/client_logging_config.py
index eeaf88c..cbea7d8 100644
--- a/client/client_logging_config.py
+++ b/client/client_logging_config.py
@@ -10,8 +10,8 @@ class ClientLoggingConfig(logging_config.LoggingConfig):
def add_debug_file_handlers(self, log_dir, log_name=None):
if not log_name:
log_name = global_config.global_config.get_config_value(
- 'CLIENT', 'default_logging_name',
- type=str, default='client')
+ 'CLIENT', 'default_logging_name',
+ value_type=str, default='client')
self._add_file_handlers_for_all_levels(log_dir, log_name)
diff --git a/client/job.py b/client/job.py
index 42f5688..214f500 100644
--- a/client/job.py
+++ b/client/job.py
@@ -276,9 +276,10 @@ class base_client_job(base_job.base_job):
Perform the drop caches initialization.
"""
self.drop_caches_between_iterations = (
- GLOBAL_CONFIG.get_config_value('CLIENT',
- 'drop_caches_between_iterations',
- type=bool, default=True))
+ GLOBAL_CONFIG.get_config_value('CLIENT',
+ 'drop_caches_between_iterations',
+ value_type=bool, default=True))
+
self.drop_caches = drop_caches
if self.drop_caches:
utils.drop_caches()
@@ -745,7 +746,7 @@ class base_client_job(base_job.base_job):
"""
abort_on_mismatch = GLOBAL_CONFIG.get_config_value('CLIENT',
'abort_on_mismatch',
- type=bool,
+ value_type=bool,
default=False)
# check to see if any partitions have changed
partition_list = partition_lib.get_partition_list(self,
diff --git a/client/job_unittest.py b/client/job_unittest.py
index cb74ad0..de88a55 100755
--- a/client/job_unittest.py
+++ b/client/job_unittest.py
@@ -584,10 +584,11 @@ class test_base_job(unittest.TestCase):
mount_list = ["/mnt/hda1", "/mnt/hdb1"]
# record
- global_config.get_config_value.expect_call('CLIENT',
- 'abort_on_mismatch',
- default=False,
-
type=bool).and_return(abort_value)
+ global_config.get_config_value.expect_call(
+ 'CLIENT',
+ 'abort_on_mismatch',
+ default=False,
+ value_type=bool).and_return(abort_value)
job.partition_lib.get_partition_list.expect_call(
self.job, exclude_swap=False).and_return(part_list)
for i in xrange(len(part_list)):
diff --git a/client/shared/base_packages.py b/client/shared/base_packages.py
index 97f142f..2bd0bfa 100644
--- a/client/shared/base_packages.py
+++ b/client/shared/base_packages.py
@@ -123,7 +123,7 @@ def check_diskspace(repo, min_free=None):
'''
if min_free is None:
min_free = global_config.global_config.get_config_value(
- 'PACKAGES', 'minimum_free_space', type=int, default=1)
+ 'PACKAGES', 'minimum_free_space', value_type=int, default=1)
try:
df = repo_run_command(repo,
'df -PB %d . | tail -1' % 10 ** 9).stdout.split()
@@ -167,7 +167,7 @@ def trim_custom_directories(repo, older_than_days=None):
if older_than_days is None:
older_than_days = global_config.global_config.get_config_value(
- 'PACKAGES', 'custom_max_age', type=int, default=40)
+ 'PACKAGES', 'custom_max_age', value_type=int, default=40)
cmd = 'find . -type f -atime +%s -exec rm -f {} \;' % older_than_days
repo_run_command(repo, cmd, ignore_status=True)
diff --git a/client/shared/global_config.py b/client/shared/global_config.py
index 9625244..88804d7 100644
--- a/client/shared/global_config.py
+++ b/client/shared/global_config.py
@@ -1,5 +1,5 @@
"""
-A singleton class for accessing global config values
+A class and singleton for accessing global config values
provides access to global configuration file
"""
@@ -81,7 +81,7 @@ else:
RUNNING_STAND_ALONE_CLIENT = True
-class global_config(object):
+class GlobalConfig(object):
'''
A class for accessing global config values
'''
@@ -271,6 +271,18 @@ class global_config(object):
raise ConfigValueError(msg)
-# ensure the class is a singleton. Now the symbol global_config
-# will point to the one and only one instace of the class
-global_config = global_config()
+# provide a singleton that should be used by all users of this module
+# unless there's a very clear reason for not reusing global_config, don't
+# bother and don't attempt to create multiple instances of GlobalConfig()
+global_config = GlobalConfig()
+
+
+def reload():
+ '''
+ Resets the global_config singleton instance
+
+ Useful when the config file itself is modified thus updated configuration
+ needs to be reflected by global_config
+ '''
+ global global_config
+ global_config = GlobalConfig()
diff --git a/client/shared/hosts/base_classes.py
b/client/shared/hosts/base_classes.py
index 12c0181..7bde58f 100644
--- a/client/shared/hosts/base_classes.py
+++ b/client/shared/hosts/base_classes.py
@@ -51,13 +51,13 @@ class Host(object):
job = None
DEFAULT_REBOOT_TIMEOUT = global_config.global_config.get_config_value(
- "HOSTS", "default_reboot_timeout", type=int, default=1800)
+ "HOSTS", "default_reboot_timeout", value_type=int, default=1800)
WAIT_DOWN_REBOOT_TIMEOUT = global_config.global_config.get_config_value(
- "HOSTS", "wait_down_reboot_timeout", type=int, default=840)
+ "HOSTS", "wait_down_reboot_timeout", value_type=int, default=840)
WAIT_DOWN_REBOOT_WARNING = global_config.global_config.get_config_value(
- "HOSTS", "wait_down_reboot_warning", type=int, default=540)
+ "HOSTS", "wait_down_reboot_warning", value_type=int, default=540)
HOURS_TO_WAIT_FOR_RECOVERY = global_config.global_config.get_config_value(
- "HOSTS", "hours_to_wait_for_recovery", type=float, default=2.5)
+ "HOSTS", "hours_to_wait_for_recovery", value_type=float, default=2.5)
# the number of hardware repair requests that need to happen before we
# actually send machines to hardware repair
HARDWARE_REPAIR_REQUEST_THRESHOLD = 4
diff --git a/frontend/afe/models.py b/frontend/afe/models.py
index cad2a4e..3192d88 100644
--- a/frontend/afe/models.py
+++ b/frontend/afe/models.py
@@ -162,7 +162,7 @@ class DroneSet(dbmodels.Model, model_logic.ModelExtensions):
drones: the drones that are part of the set
"""
DRONE_SETS_ENABLED = global_config.global_config.get_config_value(
- 'SCHEDULER', 'drone_sets_enabled', type=bool, default=False)
+ 'SCHEDULER', 'drone_sets_enabled', value_type=bool, default=False)
DEFAULT_DRONE_SET_NAME = global_config.global_config.get_config_value(
'SCHEDULER', 'default_drone_set_name', default=None)
@@ -913,7 +913,7 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
DEFAULT_MAX_RUNTIME_HRS = global_config.global_config.get_config_value(
'AUTOTEST_WEB', 'job_max_runtime_hrs_default', default=72)
DEFAULT_PARSE_FAILED_REPAIR = global_config.global_config.get_config_value(
- 'AUTOTEST_WEB', 'parse_failed_repair_default', type=bool,
+ 'AUTOTEST_WEB', 'parse_failed_repair_default', value_type=bool,
default=False)
Priority = enum.Enum('Low', 'Medium', 'High', 'Urgent')
@@ -962,7 +962,7 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
@classmethod
def parameterized_jobs_enabled(cls):
return global_config.global_config.get_config_value(
- 'AUTOTEST_WEB', 'parameterized_jobs', type=bool)
+ 'AUTOTEST_WEB', 'parameterized_jobs', value_type=bool)
@classmethod
diff --git a/frontend/afe/rpcserver_logging.py
b/frontend/afe/rpcserver_logging.py
index 453255a..71ba35b 100644
--- a/frontend/afe/rpcserver_logging.py
+++ b/frontend/afe/rpcserver_logging.py
@@ -7,7 +7,8 @@ from autotest.client.shared import global_config
config = global_config.global_config
-LOGGING_ENABLED = config.get_config_value('SERVER', 'rpc_logging', type=bool)
+LOGGING_ENABLED = config.get_config_value('SERVER', 'rpc_logging',
+ value_type=bool)
MEGABYTE = 1024 * 1024
diff --git a/frontend/settings.py b/frontend/settings.py
index 9d87252..f5a6aaf 100644
--- a/frontend/settings.py
+++ b/frontend/settings.py
@@ -10,9 +10,10 @@ from autotest.client.shared import global_config
c = global_config.global_config
_section = 'AUTOTEST_WEB'
-DEBUG = c.get_config_value(_section, "sql_debug_mode", type=bool,
default=False)
-TEMPLATE_DEBUG = c.get_config_value(_section, "template_debug_mode", type=bool,
- default=False)
+DEBUG = c.get_config_value(_section, "sql_debug_mode", value_type=bool,
+ default=False)
+TEMPLATE_DEBUG = c.get_config_value(_section, "template_debug_mode",
+ value_type=bool, default=False)
FULL_ADMIN = False
diff --git a/scheduler/drone_manager.py b/scheduler/drone_manager.py
index 172ae11..10503e7 100644
--- a/scheduler/drone_manager.py
+++ b/scheduler/drone_manager.py
@@ -201,7 +201,7 @@ class DroneManager(object):
"""
pidfile_timeout = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'max_pidfile_refreshes',
- type=int, default=2000)
+ value_type=int, default=2000)
return pidfile_timeout
@@ -621,9 +621,10 @@ class DroneManager(object):
if on_results_repository:
base_dir = self._results_dir
else:
- output_dir = global_config.global_config.get_config_value('COMMON',
-
'test_output_dir',
- default="")
+ output_dir = global_config.global_config.get_config_value(
+ 'COMMON',
+ 'test_output_dir',
+ default="")
if output_dir:
base_dir = output_dir
else:
diff --git a/scheduler/drones.py b/scheduler/drones.py
index 4119d3d..fdb1f56 100644
--- a/scheduler/drones.py
+++ b/scheduler/drones.py
@@ -7,8 +7,9 @@ from autotest.scheduler import drone_utility, email_manager
from autotest.client.shared import global_config
-AUTOTEST_INSTALL_DIR =
global_config.global_config.get_config_value('SCHEDULER',
-
'drone_installation_directory')
+AUTOTEST_INSTALL_DIR = global_config.global_config.get_config_value(
+ 'SCHEDULER',
+ 'drone_installation_directory')
class DroneUnreachable(Exception):
"""The drone is non-sshable."""
diff --git a/scheduler/monitor_db.py b/scheduler/monitor_db.py
index 361372a..b6b4c45 100644
--- a/scheduler/monitor_db.py
+++ b/scheduler/monitor_db.py
@@ -75,7 +75,8 @@ _parser_path = _parser_path_func(drones.AUTOTEST_INSTALL_DIR)
def _get_pidfile_timeout_secs():
"""@returns How long to wait for autoserv to write pidfile."""
pidfile_timeout_mins = global_config.global_config.get_config_value(
- scheduler_config.CONFIG_SECTION, 'pidfile_timeout_mins', type=int)
+ scheduler_config.CONFIG_SECTION, 'pidfile_timeout_mins',
+ value_type=int)
return pidfile_timeout_mins * 60
@@ -212,7 +213,7 @@ def main_without_exception_handling():
return
scheduler_enabled = global_config.global_config.get_config_value(
- scheduler_config.CONFIG_SECTION, 'enable_scheduler', type=bool)
+ scheduler_config.CONFIG_SECTION, 'enable_scheduler', value_type=bool)
if not scheduler_enabled:
msg = ("Scheduler not enabled, set enable_scheduler to true in the "
@@ -293,7 +294,7 @@ class Dispatcher(object):
self._seconds_between_garbage_stats = 60 * (
global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION,
- 'gc_stats_interval_mins', type=int, default=6*60))
+ 'gc_stats_interval_mins', value_type=int,
default=6*60))
def initialize(self, recover_hosts=True):
@@ -548,7 +549,7 @@ class Dispatcher(object):
email_manager.manager.enqueue_notify_email(subject, message)
die_on_orphans = global_config.global_config.get_config_value(
- scheduler_config.CONFIG_SECTION, 'die_on_orphans', type=bool)
+ scheduler_config.CONFIG_SECTION, 'die_on_orphans', value_type=bool)
if die_on_orphans:
raise RuntimeError(subject + '\n' + message)
diff --git a/scheduler/monitor_db_functional_unittest.py
b/scheduler/monitor_db_functional_unittest.py
index f7ddf26..fc640e8 100755
--- a/scheduler/monitor_db_functional_unittest.py
+++ b/scheduler/monitor_db_functional_unittest.py
@@ -45,7 +45,7 @@ class MockGlobalConfig(object):
self._config_info[(section, key)] = value
- def get_config_value(self, section, key, type=str,
+ def get_config_value(self, section, key, value_type=str,
default=None, allow_blank=False):
identifier = (section, key)
if identifier not in self._config_info:
diff --git a/scheduler/scheduler_config.py b/scheduler/scheduler_config.py
index 702f94a..5fbfeb7 100644
--- a/scheduler/scheduler_config.py
+++ b/scheduler/scheduler_config.py
@@ -33,7 +33,7 @@ class SchedulerConfig(object):
for field, config_option in self.FIELDS.iteritems():
setattr(self, field, config.get_config_value(CONFIG_SECTION,
config_option,
- type=int))
+ value_type=int))
config = SchedulerConfig()
diff --git a/server/autotest_remote.py b/server/autotest_remote.py
index 14cacd9..e6db7a0 100644
--- a/server/autotest_remote.py
+++ b/server/autotest_remote.py
@@ -10,7 +10,7 @@ from autotest.client.shared import utils as client_utils
get_value = global_config.global_config.get_config_value
autoserv_prebuild = get_value('AUTOSERV', 'enable_server_prebuild',
- type=bool, default=False)
+ value_type=bool, default=False)
class AutodirNotFoundError(Exception):
@@ -51,7 +51,7 @@ class BaseAutotest(installable_object.InstallableObject):
@classmethod
def get_client_autodir_paths(cls, host):
return global_config.global_config.get_config_value(
- 'AUTOSERV', 'client_autodir_paths', type=list)
+ 'AUTOSERV', 'client_autodir_paths', value_type=list)
@classmethod
@@ -130,16 +130,20 @@ class BaseAutotest(installable_object.InstallableObject):
def _create_test_output_dir(self, host, autodir):
tmpdir = os.path.join(autodir, 'tmp')
- state_autodir = global_config.global_config.get_config_value('COMMON',
- 'test_output_dir',
- default=tmpdir)
+ state_autodir = global_config.global_config.get_config_value(
+ 'COMMON',
+ 'test_output_dir',
+ default=tmpdir)
host.run('mkdir -p %s' % utils.sh_escape(state_autodir))
def get_fetch_location(self):
c = global_config.global_config
- repos = c.get_config_value("PACKAGES", 'fetch_location', type=list,
- default=[])
+ repos = c.get_config_value(
+ "PACKAGES",
+ 'fetch_location',
+ value_type=list,
+ default=[])
repos.reverse()
return repos
@@ -283,7 +287,7 @@ class BaseAutotest(installable_object.InstallableObject):
if self.source_material:
c = global_config.global_config
supports_autoserv_packaging = c.get_config_value(
- "PACKAGES", "serve_packages_from_autoserv", type=bool)
+ "PACKAGES", "serve_packages_from_autoserv", value_type=bool)
# Copy autotest recursively
if supports_autoserv_packaging and use_autoserv:
self._install_using_send_file(host, autodir)
@@ -497,9 +501,10 @@ class _BaseRun(object):
control += '.' + tag
tmpdir = os.path.join(self.autodir, 'tmp')
- state_dir = global_config.global_config.get_config_value('COMMON',
-
'test_output_dir',
- default=tmpdir)
+ state_dir = global_config.global_config.get_config_value(
+ 'COMMON',
+ 'test_output_dir',
+ default=tmpdir)
self.manual_control_file = control
self.manual_control_init_state = os.path.join(state_dir,
@@ -1057,7 +1062,7 @@ class client_logger(object):
elif fetch_package_match:
pkg_name, dest_path, fifo_path = fetch_package_match.groups()
serve_packages = global_config.global_config.get_config_value(
- "PACKAGES", "serve_packages_from_autoserv", type=bool)
+ "PACKAGES", "serve_packages_from_autoserv", value_type=bool)
if serve_packages and pkg_name.endswith(".tar.bz2"):
try:
self._send_tarball(pkg_name, dest_path)
diff --git a/server/autotest_remote_unittest.py
b/server/autotest_remote_unittest.py
index 0952216..b2a13d4 100755
--- a/server/autotest_remote_unittest.py
+++ b/server/autotest_remote_unittest.py
@@ -120,7 +120,7 @@ class TestBaseAutotest(unittest.TestCase):
c = autotest_remote.global_config.global_config
c.get_config_value.expect_call('PACKAGES',
'serve_packages_from_autoserv',
- type=bool).and_return(False)
+ value_type=bool).and_return(False)
self.host.send_file.expect_call('source_material', 'autodir',
delete_dest=True)
@@ -140,11 +140,11 @@ class TestBaseAutotest(unittest.TestCase):
c = autotest_remote.global_config.global_config
c.get_config_value.expect_call('PACKAGES',
- 'fetch_location', type=list, default=[]).and_return([])
+ 'fetch_location', value_type=list, default=[]).and_return([])
c.get_config_value.expect_call('PACKAGES',
'serve_packages_from_autoserv',
- type=bool).and_return(True)
+ value_type=bool).and_return(True)
self.base_autotest._install_using_send_file.expect_call(self.host,
'autodir')
tmpdir = 'autodir/tmp'
@@ -162,7 +162,7 @@ class TestBaseAutotest(unittest.TestCase):
c = autotest_remote.global_config.global_config
c.get_config_value.expect_call('PACKAGES',
- 'fetch_location', type=list, default=[]).and_return(['repo'])
+ 'fetch_location', value_type=list, default=[]).and_return(['repo'])
pkgmgr = packages.PackageManager.expect_new('autodir',
repo_urls=['repo'], hostname='hostname', do_locking=False,
run_function=self.host.run, run_function_dargs=dict(timeout=600))
@@ -234,7 +234,7 @@ class TestBaseAutotest(unittest.TestCase):
c = autotest_remote.global_config.global_config
c.get_config_value.expect_call("PACKAGES",
- 'fetch_location', type=list, default=[]).and_return(['repo'])
+ 'fetch_location', value_type=list, default=[]).and_return(['repo'])
pkgmgr = packages.PackageManager.expect_new('autotest',
repo_urls=['repo'],
hostname='hostname')
@@ -331,7 +331,7 @@ class TestBaseAutotest(unittest.TestCase):
c = autotest_remote.global_config.global_config
c.get_config_value.expect_call('PACKAGES',
'serve_packages_from_autoserv',
- type=bool).and_return(True)
+ value_type=bool).and_return(True)
logger._send_tarball.expect_call('pkgname.tar.bz2', '/autotest/dest/')
self.host.run.expect_call('echo B > /autotest/fifo3').and_raises(
diff --git a/server/crashcollect.py b/server/crashcollect.py
index 6ac9fda..d93789d 100644
--- a/server/crashcollect.py
+++ b/server/crashcollect.py
@@ -36,7 +36,7 @@ def get_crashinfo(host, test_start_time):
# Load default for number of hours to wait before giving up on crash collection.
HOURS_TO_WAIT = global_config.global_config.get_config_value(
- 'SERVER', 'crash_collection_hours_to_wait', type=float, default=4.0)
+ 'SERVER', 'crash_collection_hours_to_wait', value_type=float, default=4.0)
def wait_for_machine_to_recover(host, hours_to_wait=HOURS_TO_WAIT):
diff --git a/server/hosts/abstract_ssh.py b/server/hosts/abstract_ssh.py
index 44511aa..ab0f14e 100644
--- a/server/hosts/abstract_ssh.py
+++ b/server/hosts/abstract_ssh.py
@@ -6,7 +6,7 @@ from autotest.client.shared.global_config import global_config
get_value = global_config.get_config_value
-enable_master_ssh = get_value('AUTOSERV', 'enable_master_ssh', type=bool,
+enable_master_ssh = get_value('AUTOSERV', 'enable_master_ssh', value_type=bool,
default=False)
@@ -534,7 +534,7 @@ class AbstractSSHHost(SiteHost):
# tunable constants for the verify & repair code
AUTOTEST_GB_DISKSPACE_REQUIRED = get_value("SERVER",
"gb_diskspace_required",
- type=int,
+ value_type=int,
default=20)
diff --git a/server/hosts/factory.py b/server/hosts/factory.py
index 74e9c8c..ee40c7f 100644
--- a/server/hosts/factory.py
+++ b/server/hosts/factory.py
@@ -7,7 +7,7 @@ DEFAULT_FOLLOW_PATH = '/var/log/kern.log'
DEFAULT_PATTERNS_PATH = 'console_patterns'
SSH_ENGINE = global_config.global_config.get_config_value('AUTOSERV',
'ssh_engine',
- type=str)
+ value_type=str)
# for tracking which hostnames have already had job_start called
_started_hostnames = set()
diff --git a/server/hosts/paramiko_host.py b/server/hosts/paramiko_host.py
index 821645c..0c8321a 100644
--- a/server/hosts/paramiko_host.py
+++ b/server/hosts/paramiko_host.py
@@ -89,7 +89,7 @@ class ParamikoHost(abstract_ssh.AbstractSSHHost):
# load up all the ssh agent keys
use_sshagent = global_config.global_config.get_config_value(
- 'AUTOSERV', 'use_sshagent_with_paramiko', type=bool)
+ 'AUTOSERV', 'use_sshagent_with_paramiko', value_type=bool)
if use_sshagent:
ssh_agent = paramiko.Agent()
for i, key in enumerate(ssh_agent.get_keys()):
diff --git a/tko/db.py b/tko/db.py
index 2b7ad04..257d3ac 100644
--- a/tko/db.py
+++ b/tko/db.py
@@ -65,11 +65,11 @@ class db_sql(object):
# grab the timeout configuration
self.query_timeout = get_value("AUTOTEST_WEB", "query_timeout",
- type=int, default=3600)
- self.min_delay = get_value("AUTOTEST_WEB", "min_retry_delay", type=int,
- default=20)
- self.max_delay = get_value("AUTOTEST_WEB", "max_retry_delay", type=int,
- default=60)
+ value_type=int, default=3600)
+ self.min_delay = get_value("AUTOTEST_WEB", "min_retry_delay",
+ value_type=int, default=20)
+ self.max_delay = get_value("AUTOTEST_WEB", "max_retry_delay",
+ value_type=int, default=60)
def _init_db(self):
--
1.7.11.7
_______________________________________________
Autotest-kernel mailing list
[email protected]
https://www.redhat.com/mailman/listinfo/autotest-kernel