From a324887211bfab6db4454fd8e211b9ca8073f32a Mon Sep 17 00:00:00 2001
From: knj
Date: Mon, 19 Dec 2022 15:10:06 +0100
Subject: [PATCH 1/3] extend pages job to also get the coverage job artifacts

---
 .gitlab-ci.yml      | 8 ++++++++
 ci/index-pages.html | 6 ++++++
 2 files changed, 14 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 53cdd2871b..55108e47c3 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1147,6 +1147,14 @@ pages:
     - *unzip-or-cat
     - mv complexity-StereoDmxEVS-stereo-in-mono-out-public ./public/

+    ### collect artifacts from coverage job
+    - job_id=$(python3 ci/get_id_of_last_job_occurence.py $branch coverage-test-on-main-scheduled)
+    - echo $job_id
+    - echo "$API_URL_BASE/$job_id/artifacts"
+    - curl --request GET "$API_URL_BASE/$job_id/artifacts" --output $ARTIFACTS
+    - *unzip-or-cat
+    - mv coverage ./public
+
     - cp ci/index-pages.html public/index.html
   artifacts:
     paths:
diff --git a/ci/index-pages.html b/ci/index-pages.html
index 0a2e73e78e..9d60155e8f 100644
--- a/ci/index-pages.html
+++ b/ci/index-pages.html
@@ -16,4 +16,10 @@
 [hunk body lost to HTML rendering during extraction; what survives shows six added lines inserting a "Test Coverage" link after the existing StereoDmxEVS list entry, pointing at the coverage report moved into ./public]
--
GitLab
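For reference, the curl line added to the pages job above downloads through GitLab's job-artifacts endpoint (GET /projects/:id/jobs/:job_id/artifacts). A minimal Python sketch of the same request, reusing the project id 49 that ci/get_id_of_last_job_occurence.py hard-codes; download_artifacts is a hypothetical helper name, not code from this repository:

import requests

API_URL_BASE = "https://forge.3gpp.org/rep/api/v4/projects/49/jobs"


def download_artifacts(job_id, output="artifacts.zip"):
    # Same request the pages job makes with `curl --request GET ... --output $ARTIFACTS`
    resp = requests.get(f"{API_URL_BASE}/{job_id}/artifacts", stream=True)
    resp.raise_for_status()  # a 404 here means the job expired or kept no artifacts
    with open(output, "wb") as f:
        for chunk in resp.iter_content(chunk_size=1 << 16):
            f.write(chunk)

The *unzip-or-cat step after the curl exists because a failed request leaves an error page rather than an archive in $ARTIFACTS; patch 3 below makes the same check explicit with unzip -t.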

From 63bf57e5a4bd0e619d5df0eb37a89c1c1ccd78d0 Mon Sep 17 00:00:00 2001
From: knj
Date: Mon, 19 Dec 2022 15:16:01 +0100
Subject: [PATCH 2/3] refactor get_job_id script for importing

---
 ci/get_id_of_last_job_occurence.py | 77 +++++++++++++++---------------
 1 file changed, 39 insertions(+), 38 deletions(-)

diff --git a/ci/get_id_of_last_job_occurence.py b/ci/get_id_of_last_job_occurence.py
index f6223d9980..ca66711041 100755
--- a/ci/get_id_of_last_job_occurence.py
+++ b/ci/get_id_of_last_job_occurence.py
@@ -38,44 +38,45 @@ PAGE_SUFFIX = "&page={}"

 API_BASE_URL = "https://forge.3gpp.org/rep/api/v4/projects/49"


-parser = argparse.ArgumentParser()
-parser.add_argument("branch_name")
-parser.add_argument("job_name")
-
-args = parser.parse_args()
-
-branch_name = args.branch_name
-job_name = args.job_name
-
-
-job_id = -1
-# check last 500 pipelines max
-for page in range(100):
-    url_pls = API_BASE_URL + "/pipelines"
-
-    # need both suffixes here to descend through the pages and get also older pipelines
-    suffix = PER_PAGE_SUFFIX + PAGE_SUFFIX.format(page)
-    resp_pls = requests.get(url_pls + suffix)
-    for pl in resp_pls.json():
-        if pl["ref"] == branch_name:
-            url_jobs = url_pls + f"/{pl['id']}/jobs"
-
-            # only one of the suffixes here - this assumes only max of 50 jobs per pipeline
-            # so only one page needed
-            resp_jobs = requests.get(url_jobs + PER_PAGE_SUFFIX)
-
-            if job_name not in resp_jobs.text:
-                continue
-
-            # find actual job by name
-            for job in resp_jobs.json():
-                if job["name"] == job_name and job["status"] == "success":
-                    job_id = job["id"]
+def get_job_id(branch_name, job_name):
+    job_id = -1
+    # check last 500 pipelines max
+    for page in range(100):
+        url_pls = API_BASE_URL + "/pipelines"
+
+        # need both suffixes here to descend through the pages and get also older pipelines
+        suffix = PER_PAGE_SUFFIX + PAGE_SUFFIX.format(page)
+        resp_pls = requests.get(url_pls + suffix)
+        for pl in resp_pls.json():
+            if pl["ref"] == branch_name:
+                url_jobs = url_pls + f"/{pl['id']}/jobs"
+
+                # only one of the suffixes here - this assumes only max of 50 jobs per pipeline
+                # so only one page needed
+                resp_jobs = requests.get(url_jobs + PER_PAGE_SUFFIX)
+
+                if job_name not in resp_jobs.text:
+                    continue
+
+                # find actual job by name
+                for job in resp_jobs.json():
+                    if job["name"] == job_name and job["status"] == "success":
+                        job_id = job["id"]
+                        break
+            if job_id >= 0:
                 break
-        if job_id >= 0:
-            break
-    if job_id >= 0:
-        break
+        if job_id >= 0:
+            break

-print(job_id)
+    return job_id
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("branch_name")
+    parser.add_argument("job_name")
+
+    args = parser.parse_args()
+
+    job_id = get_job_id(args.branch_name, args.job_name)
+    print(job_id)
\ No newline at end of file
--
GitLab
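The point of this refactor is that get_job_id() can now be imported without argparse firing at import time. A short usage sketch, assuming it runs with ci/ on the module path; the branch and job names are illustrative:

from get_id_of_last_job_occurence import get_job_id

# Scans up to 500 pipelines (100 pages) on the branch, per the function's
# own comments, and returns the id of the newest successful matching job.
job_id = get_job_id("main", "coverage-test-on-main-scheduled")
if job_id < 0:
    # -1 is the function's "not found" sentinel
    raise SystemExit("no successful run of the job was found")
print(job_id)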
"complexity-mc-in-7_1_4-out", "complexity-masa-in-7_1_4-out", "complexity-StereoDmxEVS-stereo-in-mono-out", "coverage-test-on-main-scheduled" +] +ARTIFACTS = "artifacts.zip" +API_URL_BASE = "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs" +PUBLIC = "./public" + + +def main(): + + public_folder = pathlib.Path(PUBLIC) + public_folder.mkdir() + + failed_count = 0 + for job in JOBS: + job_id = get_job_id(os.environ["CI_COMMIT_REF_NAME"], job) + try: + curl_for_artifacts(job_id) + + job_public = job + "-public" + if job == "coverage-test-on-main-scheduled": + job_public = "coverage" + + pathlib.Path(job_public).rename(public_folder.joinpath(job_public)) + + except subprocess.CalledProcessError: + print(f"Could not get artifacts for {job}") + failed_count += 1 + + if failed_count == len(JOBS): + sys.exit(1) + + +def curl_for_artifacts(job_id): + cmd = [ + "curl", + "--request", + "GET", + API_URL_BASE + f"/{job_id}/artifacts", + "--output", + ARTIFACTS + ] + subprocess.run(cmd, check=True) + + # check for valid archive (if not, it is likely a 404 page, then display that) + cmd = [ + "unzip", + "-t", + ARTIFACTS + ] + try: + subprocess.run(cmd, check=True) + except subprocess.CalledProcessError: + with open(ARTIFACTS, "r") as f: + print(f.read()) + raise subprocess.CalledProcessError("Unzip check failed") + + +if __name__ == "__main__": + main() \ No newline at end of file -- GitLab