Look at proper integration of the API key.

Diff comments:
> diff --git a/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
> new file mode 100755
> index 0000000..fd1f1f3
> --- /dev/null
> +++ b/charms/focal/autopkgtest-cloud-worker/autopkgtest-cloud/tools/test-run-autopkgtest
> @@ -0,0 +1,326 @@
> +#!/usr/bin/python3
> +
> +
> +import configparser
> +import datetime
> +import json
> +import logging
> +import os
> +import socket
> +import subprocess
> +import sys
> +import tarfile
> +import time
> +import urllib.parse
> +import urllib.request
> +
> +import requests
> +import swiftclient
> +from distro_info import UbuntuDistroInfo
> +
> +WORKING_DIR = "/home/ubuntu/autopkgtest-cloud/tools/"
> +SCRIPT = "run-autopkgtest"
> +
> +UDI = UbuntuDistroInfo()
> +LATEST = UDI.supported()[-1]
> +# ARCHES = ["amd64", "arm64", "ppc64el", "s390x", "armhf", "ppa"]

This should be the real list; uncomment it and remove the next line.

> +ARCHES = ["ppa"]
> +PACKAGE = "gzip"
> +AUTOPKGTEST_SITE = "https://autopkgtest.ubuntu.com"

Amend this to use the proper domain.

> +QUEUED_JSON = "%s/queues.json" % AUTOPKGTEST_SITE
> +RUNNING_PAGE = "%s/static/running.json" % AUTOPKGTEST_SITE
> +JOURNAL_CMD = [
> +    "journalctl",
> +    "--since",
> +    "5 minutes ago",
> +    "--no-pager",
> +    "-u",
> +    "autopkgtest@*",
> +]
> +PPA_NAME = "andersson123/hello"

Replace this with a team PPA, detailed in another one of my cards.

> +
> +ARGS = ""
> +
> +TIMEOUT = 60 * 60
> +
> +URL = "https://launchpad.net/ubuntu/%s/+source/%s" % (LATEST, PACKAGE)

Give this constant a more descriptive name (rough sketch below).

> +SWIFT_CREDS_FP = "/home/ubuntu/swift-password.cred"
> +
> +INTEGRATION_KEY_FP = "/home/ubuntu/integration-key"
> +
> +
> +def get_swift_creds():
> +    swift_file = ""
> +    file_vals = {}
> +    with open(SWIFT_CREDS_FP, "r") as f:
> +        swift_file = f.read()
> +    for line in swift_file.splitlines():
> +        key, val = line.split("=")
> +        val = val.replace('"', "")
> +        file_vals[key] = val
> +    swift_creds = {
> +        "authurl": file_vals["SWIFT_AUTH_URL"],
> +        "user": file_vals["SWIFT_USERNAME"],
> +        "key": file_vals["SWIFT_PASSWORD"],
> +        "os_options": {
> +            "region_name": file_vals["SWIFT_REGION"],
> +            "project_domain_name": file_vals["SWIFT_PROJECT_DOMAIN_NAME"],
> +            "project_name": file_vals["SWIFT_PROJECT_NAME"],
> +            "user_domain_name": file_vals["SWIFT_USER_DOMAIN_NAME"],
> +        },
> +        "auth_version": file_vals["SWIFT_AUTH_VERSION"],
> +    }
> +    return swift_creds
> +
> +
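To make the naming and string-formatting comments above concrete, something like this is what I'm after; LAUNCHPAD_SOURCE_URL is only a suggested name, nothing in the script defines it today:

from distro_info import UbuntuDistroInfo

AUTOPKGTEST_SITE = "https://autopkgtest.ubuntu.com"
PACKAGE = "gzip"
LATEST = UbuntuDistroInfo().supported()[-1]

# "URL" renamed so the name says what it points at, and %-formatting
# swapped for f-strings (suggested names only):
LAUNCHPAD_SOURCE_URL = f"https://launchpad.net/ubuntu/{LATEST}/+source/{PACKAGE}"
QUEUED_JSON = f"{AUTOPKGTEST_SITE}/queues.json"
RUNNING_PAGE = f"{AUTOPKGTEST_SITE}/static/running.json"
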
> +def find_result_in_swift(swift_con, arch):

Refactor this function to look the result up by UUID instead of guessing whether the result is the right one (rough sketch after this function).

> +    time.sleep(15)

Take the sleep out of the function, or rework it so it doesn't need a sleep at all.

> +    # presuming test_info will be useful
> +    # add a sleep here so we can be sure the result is in swift
> +    # Need to handle PPA case differently
> +    # https://autopkgtest.ubuntu.com/results/autopkgtest-RELEASE-LPUSER-PPA/?format=plain
> +    # results available like this
> +    container_name = (
> +        ("autopkgtest-" + LATEST)
> +        if arch != "ppa"
> +        else (
> +            "autopkgtest-%s-%s-%s"
> +            % (
> +                LATEST,
> +                PPA_NAME.split("/", maxsplit=1)[0],
> +                PPA_NAME.split("/")[1],
> +            )
> +        )
> +    )
> +    arch_key = arch if arch != "ppa" else "amd64"
> +    logging.info("Container name:\n%s" % container_name)
> +
> +    time_now = datetime.datetime.now()
> +    while True:
> +        _, objects = swift_con.get_container(container_name, full_listing=True)
> +        for object in objects:
> +            logging.info("Object:\n%s" % object["name"])

Remove this and all the other debug logging.

> +            logging.info("Latest: %s" % LATEST)
> +            logging.info("PPA Name: %s" % PPA_NAME)
> +            logging.info("Package: %s" % PACKAGE)
> +            logging.info("arch: %s" % arch_key)
> +            ## check object name first
> +            # ah yes i need to modify arch for this!
> +            logging.info(
> +                "Latest in object?: %s" % str(LATEST in object["name"])
> +            )
> +            logging.info(
> +                "Package in object?: %s" % str(PACKAGE in object["name"])
> +            )
> +            logging.info(
> +                "arch in object?: %s" % str(arch_key in object["name"])
> +            )
> +            if (
> +                LATEST in object["name"]
> +                and PACKAGE in object["name"]
> +                and arch_key in object["name"]
> +            ):
> +                obj_time = object["last_modified"].split(".")[0]
> +                datetime_obj_time = datetime.datetime.strptime(
> +                    obj_time, "%Y-%m-%dT%H:%M:%S"
> +                )
> +                time_diff = abs(
> +                    time_now.timestamp() - datetime_obj_time.timestamp()
> +                )
> +                logging.info("Are we getting here?")
> +                logging.info("Time diff: %s" % str(time_diff))
> +                if time_diff < 600:
> +                    return object
> +
> +
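On the UUID refactor, this is roughly the shape I have in mind, assuming the submitted request's UUID is recoverable on this side and ends up in the result object's path; the helper name and signature are made up for illustration, and swift_con is the existing swiftclient.Connection:

import time


def find_result_in_swift_by_uuid(
    swift_con, container_name, test_uuid, timeout=3600, poll=30
):
    # Poll for an object whose path contains the request UUID instead of
    # guessing by package/arch/timestamp. No blind up-front sleep; the only
    # sleep left is the poll interval.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        _, objects = swift_con.get_container(container_name, full_listing=True)
        for obj in objects:
            if test_uuid in obj["name"]:
                return obj
        time.sleep(poll)
    raise TimeoutError(f"no result for {test_uuid} in {container_name}")

If the UUID only lives inside the result's testinfo rather than in the object path, we'd have to download and inspect candidates instead, so that's the assumption to verify first.
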
> +def get_trigger():
> +    r = requests.get(URL)
> +    ctr = 0
> +    resp = r.content.decode("utf-8")
> +    idx = 0
> +    for line in resp.splitlines():
> +        if "Current version" in line:
> +            idx = ctr + 1
> +        ctr += 1
> +    curr_ver = resp.splitlines()[idx]
> +    curr_ver = (
> +        curr_ver.replace("<dd>", "").replace("</dd>", "").replace(" ", "")
> +    )
> +    return "%s/%s" % (PACKAGE, curr_ver)
> +
> +
> +def check_logfile_is_accessible(url):
> +    url = url.replace("artifacts.tar.gz", "log.gz")
> +    try:
> +        r = requests.get(url)
> +    except requests.exceptions.HTTPError as err:
> +        logging.info("Acquiring logfile failed with:\n%s" % err)
> +        return False
> +    logging.info("Acquiring logfile succeeded!")
> +    logging.debug("Full logfile:\n%s" % r.content)
> +    return True
> +
> +
> +def check_result(swift_con, arch):
> +    logging.info("Getting container and object...")
> +    this_test_results = {}
> +    result = find_result_in_swift(swift_con=swift_con, arch=arch)
> +    logging.info("Found object in swift:\n%s" % str(result))
> +    object_path_lst = result["name"].split("/")
> +    object_path_lst = object_path_lst[:-1]
> +    object = "/".join(object_path_lst)
> +    container = "autopkgtest-" + LATEST
> +    logging.info("container: %s\nobject: %s" % (container, object))
> +    url = "%s/results/%s/%s/%s" % (
> +        AUTOPKGTEST_SITE,
> +        container,
> +        object,
> +        "artifacts.tar.gz",
> +    )
> +    logging.info("Results url: %s" % url)
> +    r = requests.get(url)
> +    if r.status_code == 200:
> +        with open("/tmp/artifacts.tar.gz", "wb") as f:
> +            f.write(r.content)
> +        logging.info("Acquired results!")
> +    file = tarfile.open("/tmp/artifacts.tar.gz")
> +    file.extractall("/tmp/")
> +    file.close()
> +    with open("/tmp/exitcode", "r") as f:
> +        code = f.read()
> +    logging.info("code: %s" % str(code))
> +    this_test_results["logfile-accessible"] = check_logfile_is_accessible(url)
> +    this_test_results["test-passed"] = False
> +    try:
> +        if int(code) == 0:
> +            this_test_results["test-passed"] = True
> +    except TypeError as _:
> +        pass
> +    return this_test_results
> +
> +
> +if __name__ == "__main__":

Reduce the cyclomatic complexity of this main block by splitting it into separate functions (rough sketch after the diff).

> +    logging.getLogger().setLevel(logging.INFO)
> +    logging.info("getting trigger...")
> +    trigger = get_trigger()
> +    swift_creds = get_swift_creds()
> +    swift_con = swiftclient.Connection(**swift_creds)
> +    logging.info("got trigger: %s" % trigger)
> +    results = {}
> +    # I should also queue a test from a ppa
> +    for arch in ARCHES:
> +        results[arch] = {}
> +        args = "%s%s -s %s -a %s --trigger=%s %s" % (

Change this to an f-string.

> +            WORKING_DIR,
> +            SCRIPT,
> +            LATEST,
> +            arch,
> +            trigger,
> +            PACKAGE,
> +        )
> +        if arch == "ppa":
> +            args = args.replace("ppa", "amd64")
> +            args += " --ppa %s" % PPA_NAME
> +        logging.info(
> +            "run-autopkgtest args:\n%s\nRunning autopkgtest..." % args
> +        )
> +        # submit the test
> +        p = subprocess.run(args.split(" "), check=True)
> +        test_info = {
> +            PACKAGE: {
> +                "triggers": [trigger],
> +            }
> +        }
> +        in_queue = False
> +        saved_item = ""
> +
> +        logging.info("Checking running.json for test...")
> +        # wait for the test to appear in running.json
> +        # This needs a timeout I believe
> +        start_time = datetime.datetime.now()
> +        failed = False
> +        is_running = False
> +        saved_skey = ""
> +        while not is_running and not failed:
> +            loop_time = datetime.datetime.now()
> +            duration = loop_time - start_time
> +            if duration.total_seconds() > TIMEOUT:
> +                failed = True
> +                break
> +            running = requests.get(RUNNING_PAGE)
> +            running_json = json.loads(running.content)
> +            for package, values in running_json.items():

No need to loop here; index running_json with the package name directly (see the sketch after the diff).

> +                if package == PACKAGE:
> +                    for skey, details in values.items():
> +                        num_triggers = len(test_info[PACKAGE]["triggers"])
> +                        ctr = 0
> +                        for trigger in test_info[PACKAGE]["triggers"]:
> +                            if trigger in skey:
> +                                ctr += 1
> +                        if ctr == num_triggers:
> +                            try:
> +                                this_arch = arch if arch != "ppa" else "amd64"
> +                                test_info[PACKAGE][
> +                                    "submit-time"
> +                                ] = running_json[package][skey][LATEST][
> +                                    this_arch
> +                                ][
> +                                    0
> +                                ][
> +                                    "submit-time"
> +                                ]
> +                            except KeyError as _:
> +                                continue
> +                            saved_skey = skey
> +                            is_running = True
> +                            logging.info("Test found in running.json!")
> +        logging.info("Waiting for test to leave running.json...")
> +        # wait for the test to leave running.json
> +        while is_running and not failed:
> +            loop_time = datetime.datetime.now()
> +            duration = loop_time - start_time
> +            if duration.total_seconds() > TIMEOUT:
> +                failed = True
> +                break
> +            running = requests.get(RUNNING_PAGE)
> +            if saved_skey not in running.content.decode("utf-8"):
> +                is_running = False
> +                logging.info("Test has left running.json!")
> +        logging.info("Getting results for test!")
> +        if not failed:
> +            results[arch] = check_result(swift_con, arch)
> +        else:
> +            results[arch] = False
> +        logging.info("Results:\n%s" % json.dumps(results, indent=2))
> +    # this needs changing

Figure out what this comment was for.

> +    cp = configparser.ConfigParser()

This shouldn't be at the end; read the config at the beginning of main (see the sketch after the diff).

> +    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
> +    try:
> +        webpage = cp["web"]["ExternalURL"].replace("/results", "")
> +    except KeyError:
> +        # change to logging maybe ?
> +        print("No external url found!")
> +        sys.exit(1)
> +    keypass = ""

The "this needs changing" comment above was in reference to the keypass used here.

> +    with open(INTEGRATION_KEY_FP, "r") as f:
> +        keypass = f.read().rstrip()
> +    post_me = {
> +        "type": "cloud",
> +        "source": socket.gethostname(),
> +        "pass": keypass,
> +        "test": __file__,
> +        "results": results,
> +    }
> +    results_url = webpage + "/post-integration-results"
> +    req = urllib.request.Request(results_url)
> +    req.add_header("Content-Type", "application/json; charset=utf-8")
> +    jsondata = json.dumps(post_me).encode("utf-8")
> +    req.add_header("Content-Length", len(jsondata))
> +    response = urllib.request.urlopen(req, jsondata)
> +
> +    with open("/home/ubuntu/test-run-autopkgtest-results.json", "w") as f:
> +        f.write(json.dumps(results, indent=2))
> +    for arch, result in results.items():
> +        for key, t_f in result.items():
> +            if not t_f:
> +                sys.exit(1)
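On the cyclomatic-complexity, config-at-the-top and running.json-indexing comments, this is the rough shape I'm picturing; every helper name below is a placeholder, not something the script defines yet:

import configparser
import datetime
import json
import os
import time

import requests

RUNNING_PAGE = "https://autopkgtest.ubuntu.com/static/running.json"
TIMEOUT = 60 * 60


def load_external_url():
    # Read the config once, up front, so a missing ExternalURL fails early
    # instead of after an hour of waiting on test results.
    cp = configparser.ConfigParser()
    cp.read(os.path.expanduser("~ubuntu/autopkgtest-cloud.conf"))
    return cp["web"]["ExternalURL"].replace("/results", "")


def wait_until_running(package, triggers, poll=30):
    # Index running.json by package name instead of looping over every entry;
    # return the matching submission key, or None on timeout.
    deadline = datetime.datetime.now() + datetime.timedelta(seconds=TIMEOUT)
    while datetime.datetime.now() < deadline:
        running_json = json.loads(requests.get(RUNNING_PAGE).content)
        for skey in running_json.get(package, {}):
            if all(trigger in skey for trigger in triggers):
                return skey
        time.sleep(poll)
    return None


def main():
    external_url = load_external_url()
    # submit the tests with run-autopkgtest, then per arch:
    #   skey = wait_until_running(PACKAGE, [trigger])
    #   wait for skey to leave running.json, check_result(), ...
    # and finally post the results dict to external_url.


if __name__ == "__main__":
    main()

Splitting it up like this keeps each wait loop small enough to reason about and puts the timeout handling in one place.
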
--
https://code.launchpad.net/~andersson123/autopkgtest-cloud/+git/autopkgtest-cloud/+merge/457239
Your team Canonical's Ubuntu QA is requested to review the proposed merge of ~andersson123/autopkgtest-cloud:integration-tests into autopkgtest-cloud:master.