Commit edb1c256 authored by Jan Kiene's avatar Jan Kiene
Browse files

make get_job_id return a list of ids

parent 84f33156
Loading
Loading
Loading
Loading
+32 −10
Original line number Diff line number Diff line
@@ -40,9 +40,15 @@ PAGE_SUFFIX = "&page={}"
API_URL_TMPL = "https://forge.3gpp.org/rep/api/v4/projects/{}/pipelines"


def get_job_id(branch_name, job_name, project_id, success_only):
    job_id = -1
    # check last 500 pipelines max
def get_job_ids(
    branch_name: str, job_name: str, project_id: int, success_only: bool, n: int
) -> list[int]:
    """
    Find ids of the last <n> jobs with name <job_name> on branch <branch_name> for project with id <project_id>.
    If <success_only> is true, only jobs with status success are considered.
    """
    job_ids = list()
    # check last 5000 pipelines max
    for page in range(100):
        url_pls = API_URL_TMPL.format(project_id)

@@ -64,15 +70,18 @@ def get_job_id(branch_name, job_name, project_id, success_only):
                for job in resp_jobs.json():
                    include_job = not success_only or job["status"] == "success"
                    if include_job and job["name"] == job_name:
                        job_id = job["id"]
                        job_ids.append(job["id"])

                        if len(job_ids) == n:
                            break
                if job_id >= 0:

                if len(job_ids) == n:
                    break

        if job_id >= 0:
        if len(job_ids) == n:
            break

    return job_id
    return job_ids


if __name__ == "__main__":
@@ -80,9 +89,22 @@ if __name__ == "__main__":
    parser.add_argument("branch_name", help="Name of the branch to search on")
    parser.add_argument("job_name", help="Name of the job to get the id of")
    parser.add_argument("project_id", help="ID of project to search in", type=int)
    parser.add_argument("--success_only", help="Only include jobs with status 'success'", action="store_true")
    parser.add_argument(
        "--success_only",
        help="Only include jobs with status 'success'",
        action="store_true",
    )

    args = parser.parse_args()

    job_id = get_job_id(args.branch_name, args.job_name, args.project_id, args.success_only)
    n = 1
    job_ids = get_job_ids(
        args.branch_name, args.job_name, args.project_id, args.success_only, n
    )

    try:
        job_id = job_ids[0]
    except IndexError:
        job_id = -1

    print(job_id)
+5 −3
Original line number Diff line number Diff line
@@ -5,7 +5,7 @@ import subprocess
import sys
import csv

from get_id_of_last_job_occurence import get_job_id
from get_id_of_last_job_occurence import get_job_ids

PUBLIC_FOL_MAGIC = "-public"
PROJECT_ID_FLOAT_REPO = 49
@@ -100,9 +100,11 @@ def get_artifacts_for_jobs_and_return_num_failed(

    for job, artifact_folders in jobs.items():
        print(os.environ["CI_DEFAULT_BRANCH"], job, project_id, success_only)
        job_id = get_job_id(
            os.environ["CI_DEFAULT_BRANCH"], job, project_id, success_only
        job_ids = get_job_ids(
            os.environ["CI_DEFAULT_BRANCH"], job, project_id, success_only, 1
        )
        job_id = -1 if len(job_ids) == 0 else job_ids[0]

        print(f"{job_id} - {job}")
        try:
            curl_for_artifacts(job_id)