From c4b66eb562a52bc61ab05709762498b29d6ca12f Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:25:35 +0200 Subject: [PATCH 01/50] test listing pipelines --- .gitlab-ci.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1c75ef140..e625b1844 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -66,6 +66,9 @@ workflow: variables: IVAS_PIPELINE_NAME: 'Renderer framesize BE test on $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'peaq-enc-passthrough' + variables: + IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' || $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' variables: IVAS_PIPELINE_NAME: 'PEAQ encoder pass-through test: $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'schedule' # Scheduled in any branch @@ -1634,6 +1637,29 @@ voip-be-on-merge-request: - make -j >> /dev/null - python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py +long-term-job-logs: + rules: + -if $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + stage: test + timeout: "20 minutes" + script: + - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" + + #- id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH $CI_JOB_NAME $CI_PROJECT_ID) + #- echo "Job ID from variables - $CI_JOB_ID, Job ID from script - $id_previous" + #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + #- unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + #- file_previous="previous_artifacts/mld--$CI_JOB_NAME-$id_previous--sha-*.csv" + #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME + - TODAY=$(date +'%Y-%m-%d') + # create log if it doesn't exist + - mkdir -p logs/$TODAY + - ls + - ls logs + + + # --------------------------------------------------------------- # Complexity measurement jobs -- GitLab From a0de38f21270ec1b45bf86f8d6196524da7cb539 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:26:37 +0200 Subject: [PATCH 02/50] fix syntax error --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e625b1844..2b22701b8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1639,7 +1639,7 @@ voip-be-on-merge-request: long-term-job-logs: rules: - -if $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + -if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job stage: test timeout: "20 minutes" script: -- GitLab From 3ea49bd8223b4ac925d840acea9efe700e25b949 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:28:51 +0200 Subject: [PATCH 03/50] fix syntax error --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2b22701b8..11866fffa 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1639,7 +1639,7 @@ voip-be-on-merge-request: long-term-job-logs: rules: - -if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + - if: $CI_PIPELINE_SOURCE == 'web' && 
$MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job stage: test timeout: "20 minutes" script: -- GitLab From dac810a8af51c3f4f660248d5639d88c87ec17eb Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:34:50 +0200 Subject: [PATCH 04/50] fix manual pipeline name --- .gitlab-ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 11866fffa..9acab3d8a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -66,11 +66,11 @@ workflow: variables: IVAS_PIPELINE_NAME: 'Renderer framesize BE test on $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'peaq-enc-passthrough' - variables: - IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' - - if: $CI_PIPELINE_SOURCE == 'web' || $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' variables: IVAS_PIPELINE_NAME: 'PEAQ encoder pass-through test: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' + variables: + IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'schedule' # Scheduled in any branch variables: IVAS_PIPELINE_NAME: 'Scheduled pipeline: $CI_COMMIT_BRANCH' -- GitLab From f9a46a42fb0a63916ddc7e9f17f6c90699407114 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:39:29 +0200 Subject: [PATCH 05/50] update gitlab.yaml --- .gitlab-ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9acab3d8a..56716ba09 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -29,6 +29,9 @@ workflow: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'default' # for testing variables: IVAS_PIPELINE_NAME: 'Web run pipeline: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' + variables: + IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare' variables: IVAS_PIPELINE_NAME: 'Run comparison tools against float ref: $CI_COMMIT_BRANCH' @@ -68,9 +71,6 @@ workflow: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'peaq-enc-passthrough' variables: IVAS_PIPELINE_NAME: 'PEAQ encoder pass-through test: $CI_COMMIT_BRANCH' - - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' - variables: - IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'schedule' # Scheduled in any branch variables: IVAS_PIPELINE_NAME: 'Scheduled pipeline: $CI_COMMIT_BRANCH' -- GitLab From 3fb306c27c3825339dfd3c99c8c47e76d148dab3 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 11:42:15 +0200 Subject: [PATCH 06/50] make job a manual job --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 56716ba09..a8b50ac69 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1640,6 +1640,7 @@ voip-be-on-merge-request: long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + when: manual stage: test timeout: "20 minutes" script: -- GitLab From e7f91d3ceb7d03b36501b4c77526786896595325 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 12:44:56 +0200 Subject: [PATCH 07/50] add variable for new manual job --- .gitlab-ci/variables.yml | 1 + 1 file changed, 1 insertion(+) diff --git 
a/.gitlab-ci/variables.yml b/.gitlab-ci/variables.yml index d91866b86..a804cdddb 100644 --- a/.gitlab-ci/variables.yml +++ b/.gitlab-ci/variables.yml @@ -65,3 +65,4 @@ variables: - 'voip-be-test' - 'renderer-framesize-be' - 'peaq-enc-passthrough' + - 'long-term-logs' -- GitLab From 25089d410d39083104e7033d1b3704297156a6e2 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Wed, 9 Jul 2025 12:47:25 +0200 Subject: [PATCH 08/50] move job to '.pre' stage --- .gitlab-ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a8b50ac69..9fc0b5e89 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1640,8 +1640,7 @@ voip-be-on-merge-request: long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job - when: manual - stage: test + stage: .pre timeout: "20 minutes" script: - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" -- GitLab From 8909e61337f847fdf63cf529da7a99b6ee9af523 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 09:21:00 +0200 Subject: [PATCH 09/50] test getting the artifacts from ivas-pytest-compare_ref-long-enc --- .gitlab-ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9fc0b5e89..f1ca84740 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1645,16 +1645,18 @@ long-term-job-logs: script: - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" - #- id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH $CI_JOB_NAME $CI_PROJECT_ID) - #- echo "Job ID from variables - $CI_JOB_ID, Job ID from script - $id_previous" + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) + - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - #- unzip artifacts.zip -d previous_artifacts + - unzip artifacts.zip -d previous_artifacts # This wildcard thingy relies on only one csv file being present per job - #- file_previous="previous_artifacts/mld--$CI_JOB_NAME-$id_previous--sha-*.csv" + - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" + - echo $ivas-pytest-compare_ref-long-enc_csv #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME - TODAY=$(date +'%Y-%m-%d') # create log if it doesn't exist - mkdir -p logs/$TODAY + - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY - ls - ls logs -- GitLab From a59209c78adb7111e9e11e95d8d0154388102d3e Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 09:33:36 +0200 Subject: [PATCH 10/50] use anchor for long-term-job-.logs job --- .gitlab-ci.yml | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f1ca84740..d9987e7b1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -293,6 +293,25 @@ workflow: - touch $MERGED_CSV_ARTIFACT_NAME - fi + # Aggregate job logs + - if [ $GET_LOGS -eq 1 ]; then + - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" + + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py 
$CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) + - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" + #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" + - echo $ivas-pytest-compare_ref-long-enc_csv + #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME + - TODAY=$(date +'%Y-%m-%d') + # create log if it doesn't exist + - mkdir -p logs/$TODAY + - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY + - ls + - fi + - if [ $zero_errors != 1 ]; then - echo "Run errors encountered!" # TODO: temporary only to not fail MR pipelines on crashes @@ -1637,6 +1656,23 @@ voip-be-on-merge-request: - make -j >> /dev/null - python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py + +ivas-long-term-job-logs: + extends: + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + tags: + - ivas-linux + timeout: "20 minutes" + stage: .pre + before_script: + - USE_LTV=1 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$LONG_TEST_SUITE_ENCODER" + - LEVEL_SCALING=1.0 + - SPLIT_COMPARISON="false" + - GET_LOGS=1 + <<: *ivas-pytest-anchor + long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job -- GitLab From b4255d5dc6c7c71a830d305f86513f3747970fa0 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 09:35:57 +0200 Subject: [PATCH 11/50] fix syntax err --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d9987e7b1..87089e6d3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1658,7 +1658,7 @@ voip-be-on-merge-request: ivas-long-term-job-logs: - extends: + rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job tags: - ivas-linux -- GitLab From 92a8417c101de494f0292f35e8d99435c878db89 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 09:41:43 +0200 Subject: [PATCH 12/50] remove job from .pre stage --- .gitlab-ci.yml | 28 +--------------------------- 1 file changed, 1 insertion(+), 27 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 87089e6d3..50cee1d4d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -295,11 +295,10 @@ workflow: # Aggregate job logs - if [ $GET_LOGS -eq 1 ]; then - - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" - #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts # This wildcard thingy relies on only one csv file being present per job - 
ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" @@ -1662,8 +1661,6 @@ ivas-long-term-job-logs: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job tags: - ivas-linux - timeout: "20 minutes" - stage: .pre before_script: - USE_LTV=1 - DUT_DECODER_PATH=./$REF_DECODER_PATH @@ -1673,29 +1670,6 @@ ivas-long-term-job-logs: - GET_LOGS=1 <<: *ivas-pytest-anchor -long-term-job-logs: - rules: - - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job - stage: .pre - timeout: "20 minutes" - script: - - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines" - - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) - - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" - #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" - - echo $ivas-pytest-compare_ref-long-enc_csv - #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME - - TODAY=$(date +'%Y-%m-%d') - # create log if it doesn't exist - - mkdir -p logs/$TODAY - - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY - - ls - - ls logs - -- GitLab From a44322cc44d1ebd90f4801d8a1432b67bd46a816 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 09:57:46 +0200 Subject: [PATCH 13/50] remove job from anchor and to .pre stage --- .gitlab-ci.yml | 62 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 50cee1d4d..777364efa 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1661,6 +1661,8 @@ ivas-long-term-job-logs: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job tags: - ivas-linux + stage: .pre + timeout: "20 minutes" before_script: - USE_LTV=1 - DUT_DECODER_PATH=./$REF_DECODER_PATH @@ -1668,7 +1670,65 @@ ivas-long-term-job-logs: - LEVEL_SCALING=1.0 - SPLIT_COMPARISON="false" - GET_LOGS=1 - <<: *ivas-pytest-anchor + variables: + # keep "mld" in artifact name for backwards compatibility reasons + CSV_ARTIFACT_NAME: "mld--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv" + CSV_ARTIFACT_SPLIT: "mld--split--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv" + MERGED_CSV_ARTIFACT_NAME: "$CI_JOB_NAME--merged_csv--$CI_JOB_ID.csv" + PAGES_HTML_ARTIFACT_NAME: "$CI_JOB_NAME-index.html" + SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html" + SUMMARY_HTML_ARTIFACT_SPLIT: "summary_split_$CI_JOB_NAME.html" + IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME" + IMAGES_ARTIFACT_SPLIT: "images_split_$CI_JOB_NAME" + script: + - !reference [ .job-linux, before_script ] + + - set -euxo pipefail + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + - if [ $USE_LTV -eq 1 ]; then + - bash 
"${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh + - testcase_timeout=$TESTCASE_TIMEOUT_LTV + - else + - testcase_timeout=$TESTCASE_TIMEOUT_STV + - fi + + # Aggregate job logs + - if [ $GET_LOGS -eq 1 ]; then + + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) + - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" + - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" + - echo $ivas-pytest-compare_ref-long-enc_csv + #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME + - TODAY=$(date +'%Y-%m-%d') + # create log if it doesn't exist + - mkdir -p logs/$TODAY + - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY + - ls + - fi + - exit 0 + + allow_failure: + exit_codes: + - 123 + artifacts: + name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" + expire_in: 1 week + when: always + paths: + #- report-junit.xml + #- report.html + - logs + expose_as: "ivas long term job logs results" + #reports: + # junit: + # - report-junit.xml + -- GitLab From b0bdb717cde913e7b2df0b1d9349171d967b2f94 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 10:07:32 +0200 Subject: [PATCH 14/50] add missing $ --- .gitlab-ci.yml | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 777364efa..9b573bd6a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -293,24 +293,6 @@ workflow: - touch $MERGED_CSV_ARTIFACT_NAME - fi - # Aggregate job logs - - if [ $GET_LOGS -eq 1 ]; then - - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) - - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" - - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" - - echo $ivas-pytest-compare_ref-long-enc_csv - #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME - - TODAY=$(date +'%Y-%m-%d') - # create log if it doesn't exist - - mkdir -p logs/$TODAY - - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY - - ls - - fi - - if [ $zero_errors != 1 ]; then - echo "Run errors encountered!" 
# TODO: temporary only to not fail MR pipelines on crashes @@ -1708,7 +1690,7 @@ ivas-long-term-job-logs: - TODAY=$(date +'%Y-%m-%d') # create log if it doesn't exist - mkdir -p logs/$TODAY - - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY + - mv $ivas-pytest-compare_ref-long-enc_csv logs/$TODAY - ls - fi - exit 0 -- GitLab From c4e98f13ca70124936cb71143006b9d85bbfa63d Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 10:12:14 +0200 Subject: [PATCH 15/50] Update file .gitlab-ci.yml --- .gitlab-ci.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9b573bd6a..d8f3332e8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1683,14 +1683,11 @@ ivas-long-term-job-logs: - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv" - - echo $ivas-pytest-compare_ref-long-enc_csv - #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME - TODAY=$(date +'%Y-%m-%d') - # create log if it doesn't exist + # create logs dir if it doesn't exist - mkdir -p logs/$TODAY - - mv $ivas-pytest-compare_ref-long-enc_csv logs/$TODAY + # This wildcard thingy relies on only one csv file being present per job + - mv previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv logs/$TODAY - ls - fi - exit 0 -- GitLab From 4081cea23ff04534e074610453edc717323d4092 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 10:54:14 +0200 Subject: [PATCH 16/50] test getting logs for all relevant jobs --- .gitlab-ci.yml | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d8f3332e8..a550027f0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1677,19 +1677,21 @@ ivas-long-term-job-logs: - fi # Aggregate job logs - - if [ $GET_LOGS -eq 1 ]; then - - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID) - - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous" - - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip -d previous_artifacts - - TODAY=$(date +'%Y-%m-%d') - # create logs dir if it doesn't exist - - mkdir -p logs/$TODAY - # This wildcard thingy relies on only one csv file being present per job - - mv previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv logs/$TODAY - - ls - - fi + - | + job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") + for job in "${job[@]}"; do + echo "Getting job logs for: $job" + - id_previous=$(python3 
ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job" $CI_PROJECT_ID) + - echo "Job ID from variables - "$job", Job ID from script - $id_previous" + - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d previous_artifacts + - TODAY=$(date +'%Y-%m-%d') + # create logs dir if it doesn't exist + - mkdir -p logs/$TODAY + # This wildcard thingy relies on only one csv file being present per job + - mv previous_artifacts/mld--"$job"-$id_previous--sha-*.csv logs/$TODAY + done + - ls logs - exit 0 allow_failure: -- GitLab From fe1e1413c2532cb0bc22aa778653584ebc6276be Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:04:11 +0200 Subject: [PATCH 17/50] fix yaml syntax --- .gitlab-ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a550027f0..ffe70e261 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1679,17 +1679,19 @@ ivas-long-term-job-logs: # Aggregate job logs - | job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") - for job in "${job[@]}"; do + for job_name in "${job[@]}"; do echo "Getting job logs for: $job" - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job" $CI_PROJECT_ID) - - echo "Job ID from variables - "$job", Job ID from script - $id_previous" + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) + - echo "Job ID from variables - "$job_name", Job ID from script -d $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts - TODAY=$(date +'%Y-%m-%d') # create logs dir if it doesn't exist - mkdir -p logs/$TODAY # This wildcard thingy relies on only one csv file being present per job - - mv previous_artifacts/mld--"$job"-$id_previous--sha-*.csv logs/$TODAY + - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY + - rm artifacts.zip + - rm -r previous_artifacts done - ls logs - exit 0 -- GitLab From 08b51ee7d15dffdd6aa5500bfce3c707747e08a3 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:09:24 +0200 Subject: [PATCH 18/50] fix syntax error --- .gitlab-ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ffe70e261..d1893dbf6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1681,17 +1681,17 @@ ivas-long-term-job-logs: job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") for job_name in "${job[@]}"; do echo "Getting job logs for: $job" - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) - - echo "Job ID from variables - "$job_name", Job ID from script -d $id_previous" - - curl --request 
GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip -d previous_artifacts - - TODAY=$(date +'%Y-%m-%d') + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) + echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + unzip artifacts.zip -d previous_artifacts + TODAY=$(date +'%Y-%m-%d') # create logs dir if it doesn't exist - - mkdir -p logs/$TODAY + mkdir -p logs/$TODAY # This wildcard thingy relies on only one csv file being present per job - - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY - - rm artifacts.zip - - rm -r previous_artifacts + mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY + rm artifacts.zip + rm -r previous_artifacts done - ls logs - exit 0 -- GitLab From 32acfa9a148e29b1eef60c7d1e58924d448bf4f8 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:21:04 +0200 Subject: [PATCH 19/50] cleanup --- .gitlab-ci.yml | 42 +++++++----------------------------------- 1 file changed, 7 insertions(+), 35 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d1893dbf6..548dd6020 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1640,41 +1640,21 @@ voip-be-on-merge-request: ivas-long-term-job-logs: rules: - - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" + - if: $CI_PIPELINE_SOURCE == 'schedule' tags: - ivas-linux stage: .pre - timeout: "20 minutes" - before_script: - - USE_LTV=1 - - DUT_DECODER_PATH=./$REF_DECODER_PATH - - TEST_SUITE="$LONG_TEST_SUITE_ENCODER" - - LEVEL_SCALING=1.0 - - SPLIT_COMPARISON="false" - - GET_LOGS=1 - variables: - # keep "mld" in artifact name for backwards compatibility reasons - CSV_ARTIFACT_NAME: "mld--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv" - CSV_ARTIFACT_SPLIT: "mld--split--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv" - MERGED_CSV_ARTIFACT_NAME: "$CI_JOB_NAME--merged_csv--$CI_JOB_ID.csv" - PAGES_HTML_ARTIFACT_NAME: "$CI_JOB_NAME-index.html" - SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html" - SUMMARY_HTML_ARTIFACT_SPLIT: "summary_split_$CI_JOB_NAME.html" - IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME" - IMAGES_ARTIFACT_SPLIT: "images_split_$CI_JOB_NAME" + timeout: "10 minutes" script: - - !reference [ .job-linux, before_script ] - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - - if [ $USE_LTV -eq 1 ]; then - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh - - testcase_timeout=$TESTCASE_TIMEOUT_LTV - - else - - testcase_timeout=$TESTCASE_TIMEOUT_STV - - fi + + # create logs dir if it doesn't exist + - TODAY=$(date +'%Y-%m-%d') + - mkdir -p logs/$TODAY # Aggregate job logs - | @@ -1685,9 +1665,6 @@ ivas-long-term-job-logs: echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip unzip artifacts.zip -d previous_artifacts - TODAY=$(date 
+'%Y-%m-%d') - # create logs dir if it doesn't exist - mkdir -p logs/$TODAY # This wildcard thingy relies on only one csv file being present per job mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY rm artifacts.zip @@ -1704,13 +1681,8 @@ ivas-long-term-job-logs: expire_in: 1 week when: always paths: - #- report-junit.xml - #- report.html - logs expose_as: "ivas long term job logs results" - #reports: - # junit: - # - report-junit.xml -- GitLab From 79020ccbfdcbd5a52a1e27dfe7a3138247c77bf8 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:27:54 +0200 Subject: [PATCH 20/50] add missing scripts --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 548dd6020..d6c55e359 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1651,6 +1651,8 @@ ivas-long-term-job-logs: - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh # create logs dir if it doesn't exist - TODAY=$(date +'%Y-%m-%d') -- GitLab From 7c871709590308efbfc82eeb73e12b66727e1f51 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:35:50 +0200 Subject: [PATCH 21/50] add missing !ireference --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d6c55e359..85d5f89b7 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1647,7 +1647,7 @@ ivas-long-term-job-logs: stage: .pre timeout: "10 minutes" script: - + - !reference [ .job-linux, before_script ] - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh -- GitLab From 7099af40c170eacca4fe86b3794a57100e69f3ab Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 11:55:39 +0200 Subject: [PATCH 22/50] get previous logs from same job --- .gitlab-ci.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 85d5f89b7..248ca3714 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1658,6 +1658,15 @@ ivas-long-term-job-logs: - TODAY=$(date +'%Y-%m-%d') - mkdir -p logs/$TODAY + # Get previous logs of this job incase we are running on a different machine + - rm logs + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d logs + + - ls logs + # Aggregate job logs - | job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") -- GitLab From f668c0a83664df9f133437f08bbdb841704780f5 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:11:36 +0200 Subject: [PATCH 23/50] remove dir --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml 
b/.gitlab-ci.yml index 248ca3714..0e888abfb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1659,7 +1659,7 @@ ivas-long-term-job-logs: - mkdir -p logs/$TODAY # Get previous logs of this job incase we are running on a different machine - - rm logs + - rm -r logs - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip -- GitLab From 913b344955374fe12b0550c9cec60571d71e37c4 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:21:22 +0200 Subject: [PATCH 24/50] increase timeout --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0e888abfb..a4d1bd5f6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1645,9 +1645,9 @@ ivas-long-term-job-logs: tags: - ivas-linux stage: .pre - timeout: "10 minutes" + timeout: "20 minutes" script: - - !reference [ .job-linux, before_script ] + - !reference [ before_script ] - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh -- GitLab From 879aa7049d5ad5285ac950f3c41f5ba75e655345 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:23:22 +0200 Subject: [PATCH 25/50] Update file .gitlab-ci.yml --- .gitlab-ci.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a4d1bd5f6..8ae2d1a4d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1642,12 +1642,10 @@ ivas-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" - if: $CI_PIPELINE_SOURCE == 'schedule' - tags: - - ivas-linux stage: .pre timeout: "20 minutes" script: - - !reference [ before_script ] + - !reference [ .job-linux, before_script ] - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh -- GitLab From 06e002e4fe6e39974c2f546fb888380ef5dd3a9e Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:45:33 +0200 Subject: [PATCH 26/50] add tag: ivas-basop-linux --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8ae2d1a4d..bf04ea397 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1642,6 +1642,8 @@ ivas-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" - if: $CI_PIPELINE_SOURCE == 'schedule' + tags: + - ivas-basop-linux stage: .pre timeout: "20 minutes" script: -- GitLab From c81b37149b8e560982771df26badf23f4d230783 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:50:40 +0200 Subject: [PATCH 27/50] Update file .gitlab-ci.yml --- .gitlab-ci.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index bf04ea397..d28bf878e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1654,10 +1654,6 @@ ivas-long-term-job-logs: - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh - # create logs dir if it doesn't exist - - TODAY=$(date +'%Y-%m-%d') - - mkdir -p logs/$TODAY - # Get previous logs of this job incase we are running 
on a different machine - rm -r logs - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) @@ -1667,6 +1663,10 @@ ivas-long-term-job-logs: - ls logs + # create logs dir if it doesn't exist + - TODAY=$(date +'%Y-%m-%d') + - mkdir -p logs/$TODAY + # Aggregate job logs - | job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") @@ -1698,7 +1698,6 @@ ivas-long-term-job-logs: - # --------------------------------------------------------------- # Complexity measurement jobs # --------------------------------------------------------------- -- GitLab From 5601670a5e8df4683d16d0a478e164d1b2e662b8 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:54:10 +0200 Subject: [PATCH 28/50] fix syntax err --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d28bf878e..80cf582ba 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1655,7 +1655,7 @@ ivas-long-term-job-logs: - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh # Get previous logs of this job incase we are running on a different machine - - rm -r logs + - if [ -d "logs" ]; then rm -rf "logs"; fi - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip -- GitLab From b8541b1c844eda50cb5a3d9f70180b5b5535add7 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Thu, 10 Jul 2025 13:57:28 +0200 Subject: [PATCH 29/50] bug fix causing job not to be found --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 80cf582ba..1e5d3266d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1657,7 +1657,7 @@ ivas-long-term-job-logs: # Get previous logs of this job incase we are running on a different machine - if [ -d "logs" ]; then rm -rf "logs"; fi - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d logs -- GitLab From 1b8cb94b95a4238bd4cf61c35b642cbc39264dab Mon Sep 17 00:00:00 2001 From: kinuthia Date: Fri, 11 Jul 2025 10:06:40 +0200 Subject: [PATCH 30/50] handle case when job is running for the first time --- .gitlab-ci.yml | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1e5d3266d..2c190366b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1645,7 +1645,7 @@ ivas-long-term-job-logs: tags: - ivas-basop-linux stage: .pre - timeout: "20 minutes" + timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] - set -euxo pipefail @@ -1656,10 +1656,16 @@ ivas-long-term-job-logs: # Get previous logs of this job incase 
we are running on a different machine - if [ -d "logs" ]; then rm -rf "logs"; fi + # TODO add variable to inject logs from backup copy + + # handle case where the job is running for the first time. e.g where the job was not found. - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip -d logs + - + - if [ "$id_previous" != "-1" ]; then + - unzip artifacts.zip -d logs + - fi - ls logs @@ -1689,7 +1695,7 @@ ivas-long-term-job-logs: - 123 artifacts: name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" - expire_in: 1 week + expire_in: 4 weeks when: always paths: - logs -- GitLab From 836bdf5093bec7a78f0c6af3f53dfa36fcc45c13 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Fri, 11 Jul 2025 10:12:14 +0200 Subject: [PATCH 31/50] correct formatting --- .gitlab-ci.yml | 68 +++++++++++++++++++++++++------------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2c190366b..79a8a62d6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1646,49 +1646,49 @@ ivas-long-term-job-logs: - ivas-basop-linux stage: .pre timeout: "25 minutes" - script: - - !reference [ .job-linux, before_script ] - - set -euxo pipefail - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh - - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh + script: | + !reference [ .job-linux, before_script ] + set -euxo pipefail + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh # Get previous logs of this job incase we are running on a different machine - - if [ -d "logs" ]; then rm -rf "logs"; fi + if [ -d "logs" ]; then rm -rf "logs"; fi # TODO add variable to inject logs from backup copy # handle case where the job is running for the first time. e.g where the job was not found. 
- - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - - - if [ "$id_previous" != "-1" ]; then - - unzip artifacts.zip -d logs - - fi + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" + curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + + if [ "$id_previous" != "-1" ]; then + unzip artifacts.zip -d logs + fi - - ls logs + ls logs - # create logs dir if it doesn't exist - - TODAY=$(date +'%Y-%m-%d') - - mkdir -p logs/$TODAY + # create logs dir if it doesn't exist + TODAY=$(date +'%Y-%m-%d') + mkdir -p logs/$TODAY # Aggregate job logs - - | - job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") - for job_name in "${job[@]}"; do - echo "Getting job logs for: $job" - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY - rm artifacts.zip - rm -r previous_artifacts - done - - ls logs - - exit 0 + + job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") + for job_name in "${job[@]}"; do + echo "Getting job logs for: $job" + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) + echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY + rm artifacts.zip + rm -r previous_artifacts + done + ls logs + exit 0 allow_failure: exit_codes: -- GitLab From 624f2269a1dd6a3e4f2948771f547b039d3073ee Mon Sep 17 00:00:00 2001 From: kinuthia Date: Fri, 11 Jul 2025 10:15:52 +0200 Subject: [PATCH 32/50] fix syntax --- .gitlab-ci.yml | 85 +++++++++++++++++++++++++------------------------- 1 file changed, 43 insertions(+), 42 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 79a8a62d6..e70b75be9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ 
-1646,49 +1646,50 @@ ivas-long-term-job-logs: - ivas-basop-linux stage: .pre timeout: "25 minutes" - script: | - !reference [ .job-linux, before_script ] - set -euxo pipefail - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh - - # Get previous logs of this job incase we are running on a different machine - if [ -d "logs" ]; then rm -rf "logs"; fi - # TODO add variable to inject logs from backup copy - - # handle case where the job is running for the first time. e.g where the job was not found. - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - if [ "$id_previous" != "-1" ]; then - unzip artifacts.zip -d logs - fi - - ls logs - - # create logs dir if it doesn't exist - TODAY=$(date +'%Y-%m-%d') - mkdir -p logs/$TODAY - - # Aggregate job logs - - job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") - for job_name in "${job[@]}"; do - echo "Getting job logs for: $job" - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + script: + - !reference [ .job-linux, before_script ] + - | + set -euxo pipefail + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh + + # Get previous logs of this job incase we are running on a different machine + if [ -d "logs" ]; then rm -rf "logs"; fi + # TODO add variable to inject logs from backup copy + + # handle case where the job is running for the first time. e.g where the job was not found. 
+ id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY - rm artifacts.zip - rm -r previous_artifacts - done - ls logs - exit 0 + + if [ "$id_previous" != "-1" ]; then + unzip artifacts.zip -d logs + fi + + ls logs + + # create logs dir if it doesn't exist + TODAY=$(date +'%Y-%m-%d') + mkdir -p logs/$TODAY + + # Aggregate job logs + + job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") + for job_name in "${job[@]}"; do + echo "Getting job logs for: $job" + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) + echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY + rm artifacts.zip + rm -r previous_artifacts + done + ls logs + exit 0 allow_failure: exit_codes: -- GitLab From 6e5b690fca8649c9702c320c3d96c1b60359b546 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Fri, 11 Jul 2025 10:26:48 +0200 Subject: [PATCH 33/50] Update file .gitlab-ci.yml --- .gitlab-ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e70b75be9..173d604fc 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1668,8 +1668,6 @@ ivas-long-term-job-logs: unzip artifacts.zip -d logs fi - ls logs - # create logs dir if it doesn't exist TODAY=$(date +'%Y-%m-%d') mkdir -p logs/$TODAY -- GitLab From bf63482dd045da53b37ec8ada669d9f65400e0d8 Mon Sep 17 00:00:00 2001 From: kinuthia Date: Fri, 11 Jul 2025 10:42:48 +0200 Subject: [PATCH 34/50] require MANUAL_PIPELINE_TYPE to be set when running scheduled job --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 173d604fc..7a0c0f324 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1641,7 +1641,7 @@ voip-be-on-merge-request: ivas-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" - - if: $CI_PIPELINE_SOURCE == 'schedule' + - if: $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == "long-term-logs" tags: - ivas-basop-linux stage: .pre -- GitLab From c9dbededbfd71e5aa92ab0e82b59fd55d639af39 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 15:46:50 +0200 Subject: [PATCH 35/50] A few fixes for long term logs job --- .gitlab-ci.yml | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7a0c0f324..23fd54f3e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -282,7 
+282,7 @@ workflow: - if [ $USE_LTV -eq 1 ] && [ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]; then - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH $CI_JOB_NAME $CI_PROJECT_ID) - echo "Job ID from variables - $CI_JOB_ID, Job ID from script - $id_previous" - - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts # This wildcard thingy relies on only one csv file being present per job - file_previous="previous_artifacts/mld--$CI_JOB_NAME-$id_previous--sha-*.csv" @@ -1662,24 +1662,27 @@ ivas-long-term-job-logs: # handle case where the job is running for the first time. e.g where the job was not found. id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip if [ "$id_previous" != "-1" ]; then - unzip artifacts.zip -d logs + # Unzip artifacts to recover past logs dir + curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + unzip artifacts.zip + else + # create logs dir if it doesn't exist + mkdir logs fi - # create logs dir if it doesn't exist + # create folder for today's results TODAY=$(date +'%Y-%m-%d') mkdir -p logs/$TODAY # Aggregate job logs - - job=("ivas-pytest-compare_ref-long-enc" "ivas-pytest-compare_ref-long-enc-lev+10" "ivas-pytest-compare_ref-long-enc-lev-10" "ivas-pytest-compare_ref-long-dec" "ivas-pytest-compare_ref-long-dec-lev+10" "ivas-pytest-compare_ref-long-dec-lev-10" "ivas-pytest-compare_ref-long-fx-fx" "ivas-pytest-compare_ref-long-fx-fx-lev+10" "ivas-pytest-compare_ref-long-fx-fx-lev-10") - for job_name in "${job[@]}"; do + jobs="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" + for job in $jobs; do echo "Getting job logs for: $job" id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip unzip artifacts.zip -d previous_artifacts # This wildcard thingy relies on only one csv file being present per job mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY @@ -1717,7 +1720,7 @@ ivas-long-term-job-logs: # this is a testing/maintenance mechanism to force getting the log history from a specific job id # see below in the concrete complexity jobs - if [ "$JOB_ID_INJECT" != "" ]; then job_id=$JOB_ID_INJECT; fi - - curl --silent --show-error --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts.zip + - curl --silent --show-error --request GET 
"$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts.zip - unzip -qq artifacts.zip || true # this may fail on first run, when there are no artifacts there and the zip file is actually just "404"-html - public_dir="$CI_JOB_NAME-public" @@ -1747,7 +1750,7 @@ ivas-long-term-job-logs: ### 1.5.part: get the corresponding measurement from ivas-float-update - job_id=$(python3 ci/get_id_of_last_job_occurence.py ivas-float-update $CI_JOB_NAME $CI_PROJECT_ID) - echo $job_id - - curl --silent --show-error --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts_ref.zip + - curl --silent --show-error --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts_ref.zip - unzip -qq -j artifacts_ref.zip "*latest_WMOPS.csv" || true # add file to arguments only if the artifact could be retrieved to prevent error later. - if [ -f latest_WMOPS.csv ]; then GET_WMOPS_ARGS="$GET_WMOPS_ARGS latest_WMOPS.csv"; fi -- GitLab From 547e6bdc24ac3d55bcffaf7c7c48085525742e0c Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 15:50:31 +0200 Subject: [PATCH 36/50] Cleanup of unnecessary LTV fetch in log job --- .gitlab-ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 23fd54f3e..1c33ff205 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1652,8 +1652,6 @@ ivas-long-term-job-logs: set -euxo pipefail bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh # Get previous logs of this job incase we are running on a different machine if [ -d "logs" ]; then rm -rf "logs"; fi -- GitLab From c9341a901bc67506a446a1646ca0f42404cce99a Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 16:20:45 +0200 Subject: [PATCH 37/50] Fix syntax error in log job --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1c33ff205..cc11f0433 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1675,8 +1675,8 @@ ivas-long-term-job-logs: mkdir -p logs/$TODAY # Aggregate job logs - jobs="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" - for job in $jobs; do + job_names="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" + for job_name in $job_names; do echo "Getting job logs for: $job" id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" -- GitLab From 1bafc9206b21590fde375efaec7e716b5901a729 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 16:41:45 +0200 Subject: [PATCH 38/50] Fix syntax error in long term logs job --- 
.gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index cc11f0433..f0126ea27 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1677,7 +1677,7 @@ ivas-long-term-job-logs: # Aggregate job logs job_names="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" for job_name in $job_names; do - echo "Getting job logs for: $job" + echo "Getting job logs for: $job_name" id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip -- GitLab From d80dfda86f653656a77d0070011e71c8022f34d0 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 17:13:51 +0200 Subject: [PATCH 39/50] Try excluding build jobs --- .gitlab-ci.yml | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f0126ea27..1bd852a5d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -842,6 +842,10 @@ clang-format-check: # ensure that codec builds on linux build-codec-linux-make: rules: + - if: $CI_JOB_NAME == 'pages' + when: never + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs @@ -859,6 +863,10 @@ build-codec-linux-make: # ensure that codec builds on linux with instrumentation active build-codec-linux-instrumented-make: rules: + - if: $CI_JOB_NAME == 'pages' + when: never + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main @@ -876,6 +884,10 @@ build-codec-linux-instrumented-make: build-codec-linux-debugging-make: rules: + - if: $CI_JOB_NAME == 'pages' + when: never + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main @@ -894,6 +906,10 @@ build-codec-linux-debugging-make: build-codec-windows-msbuild: rules: + - if: $CI_JOB_NAME == 'pages' + when: never + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs @@ -1658,7 +1674,8 @@ ivas-long-term-job-logs: # TODO add variable to inject logs from backup copy # handle case where the job is running for the first time. e.g where the job was not found. 
- id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + #id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + id_previous="-1" echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" if [ "$id_previous" != "-1" ]; then -- GitLab From 9f2a842fedfc8d9af3b7d15f6e9796a09a19fbce Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 17:16:54 +0200 Subject: [PATCH 40/50] Move to test stage --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1bd852a5d..8fb40a0ce 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1660,7 +1660,7 @@ ivas-long-term-job-logs: - if: $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == "long-term-logs" tags: - ivas-basop-linux - stage: .pre + stage: .test timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] -- GitLab From c863ddfc1639cfd5216851dad14ff25e540cf72f Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Fri, 11 Jul 2025 17:18:25 +0200 Subject: [PATCH 41/50] Maintenance stage --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8fb40a0ce..8b40ca840 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1660,7 +1660,7 @@ ivas-long-term-job-logs: - if: $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == "long-term-logs" tags: - ivas-basop-linux - stage: .test + stage: maintenance timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] -- GitLab From 268db15d6387d2a286301a20ea7bea460bba9455 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sat, 12 Jul 2025 22:09:07 +0200 Subject: [PATCH 42/50] Add job for storing backup copy of logs, and way to inject backups --- .gitlab-ci.yml | 52 +++++++++++++++++++++++++++++++++++++--- .gitlab-ci/variables.yml | 4 ++++ 2 files changed, 53 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8b40ca840..10f8513b7 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -32,6 +32,9 @@ workflow: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'long-term-logs' variables: IVAS_PIPELINE_NAME: 'Aggregate long term logs: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'backup-long-term-logs' + variables: + IVAS_PIPELINE_NAME: 'Backup long term logs: $CI_COMMIT_BRANCH' - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare' variables: IVAS_PIPELINE_NAME: 'Run comparison tools against float ref: $CI_COMMIT_BRANCH' @@ -1659,19 +1662,24 @@ ivas-long-term-job-logs: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" - if: $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == "long-term-logs" tags: - - ivas-basop-linux + - $RUNNER_TAG stage: maintenance timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] - | set -euxo pipefail + echo "Running on RUNNER_TAG: $RUNNER_TAG" bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - # Get previous logs of this job incase we are running on a different machine + # Clean up logs if [ -d "logs" ]; then rm -rf "logs"; fi - # TODO add variable to inject logs from backup copy + + # Inject logs from backup copy if LOGS_BACKUP_SOURCE_DIR is set + if [ -d "$LOGS_BACKUP_SOURCE_DIR" ]; then + cp -r $LOGS_BACKUP_SOURCE_DIR/logs . 
+ fi # handle case where the job is running for the first time. e.g where the job was not found. #id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) @@ -1719,6 +1727,44 @@ ivas-long-term-job-logs: expose_as: "ivas long term job logs results" +backup-long-term-job-logs: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "backup-long-term-logs" + - if: $CI_PIPELINE_SOURCE == 'schedule' && $MANUAL_PIPELINE_TYPE == "backup-long-term-logs" + tags: + - $RUNNER_TAG + stage: maintenance + timeout: "25 minutes" + script: + - !reference [ .job-linux, before_script ] + - | + set -euxo pipefail + echo "Running on RUNNER_TAG: $RUNNER_TAG" + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + echo "Job name from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" + + if [ "$id_previous" != "-1" ]; then + # Unzip artifacts to recover past logs dir + curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + unzip artifacts.zip + else + # create logs dir if it doesn't exist + mkdir logs + fi + + # Store logs as backup copy if LOGS_BACKUP_TARGET_DIR is set + if [ -d "$LOGS_BACKUP_TARGET_DIR" ]; then + cp -r logs $LOGS_BACKUP_TARGET_DIR/logs + exit 0; + else + echo "Error: LOGS_BACKUP_TARGET_DIR not set for backup job!" + exit -1; + fi + + # --------------------------------------------------------------- diff --git a/.gitlab-ci/variables.yml b/.gitlab-ci/variables.yml index a804cdddb..96ef1f507 100644 --- a/.gitlab-ci/variables.yml +++ b/.gitlab-ci/variables.yml @@ -46,6 +46,9 @@ variables: FLOAT_REF_COMMIT_FILE: "float-ref-git-sha.txt" CUT_COMMIT_FILE: "CuT-git-sha.txt" MERGE_TARGET_COMMIT_FILE: "merge-target-git-sha.txt" + RUNNER_TAG: "ivas-basop-linux" + LOGS_BACKUP_SOURCE_DIR: "" + LOGS_BACKUP_TARGET_DIR: "" MANUAL_PIPELINE_TYPE: description: "Type for the manual pipeline run. Use 'pytest-compare' to run comparison test against reference float codec." value: 'default' @@ -66,3 +69,4 @@ variables: - 'renderer-framesize-be' - 'peaq-enc-passthrough' - 'long-term-logs' + - 'backup-long-term-logs' -- GitLab From b2683cf3ca3c38eeae309e481f38e8699e4f5604 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sat, 12 Jul 2025 22:15:21 +0200 Subject: [PATCH 43/50] Add comments to explain logs backup procedure --- .gitlab-ci.yml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 10f8513b7..3eed21fb2 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1656,7 +1656,11 @@ voip-be-on-merge-request: - make -j >> /dev/null - python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py - +# To inject backup copy in manual trigger: +# - Store logs backup in accessible folder, e.g. /tmp/ivas +# - Set MANUAL_PIPELINE_TRIGGER to long-term-logs +# - Set RUNNER_TAG to specific runner with this folder prepared, e.g. test-ericsson-linux-runner-5 +# - Set LOGS_BACKUP_SOURCE_DIR to source folder, e.g. 
/tmp/ivas ivas-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" @@ -1715,9 +1719,6 @@ ivas-long-term-job-logs: ls logs exit 0 - allow_failure: - exit_codes: - - 123 artifacts: name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" expire_in: 4 weeks @@ -1726,7 +1727,11 @@ ivas-long-term-job-logs: - logs expose_as: "ivas long term job logs results" - +# To store backup copy: +# - Prepare accessible folder for backup, e.g. /tmp/ivas +# - Set MANUAL_PIPELINE_TRIGGER to backup-long-term-logs +# - Set RUNNER_TAG to specific runner with this folder prepared, e.g. test-ericsson-linux-runner-5 +# - Set LOGS_BACKUP_TARGET_DIR to source folder, e.g. /tmp/ivas backup-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "backup-long-term-logs" -- GitLab From 157362c71691b46ff05f46c4a1c7c0c2e0fa65ee Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sat, 12 Jul 2025 22:16:47 +0200 Subject: [PATCH 44/50] Remove temporary disabling of logs fetch --- .gitlab-ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3eed21fb2..4eb39a50e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1686,8 +1686,7 @@ ivas-long-term-job-logs: fi # handle case where the job is running for the first time. e.g where the job was not found. - #id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - id_previous="-1" + id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" if [ "$id_previous" != "-1" ]; then -- GitLab From f6a2f4a521ee3abfd8ce1496bcbfe967acc85ebb Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sun, 13 Jul 2025 16:35:10 +0200 Subject: [PATCH 45/50] Add '-' in log jobs for consistency. Fix job name in backup-long-term-job-logs --- .gitlab-ci.yml | 124 +++++++++++++++++++++++-------------------------- 1 file changed, 59 insertions(+), 65 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4eb39a50e..da736bfb4 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1671,52 +1671,49 @@ ivas-long-term-job-logs: timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] - - | - set -euxo pipefail - echo "Running on RUNNER_TAG: $RUNNER_TAG" - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + - set -euxo pipefail + - echo "Running on RUNNER_TAG: $RUNNER_TAG" + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - # Clean up logs - if [ -d "logs" ]; then rm -rf "logs"; fi + # Clean up logs + - if [ -d "logs" ]; then rm -rf "logs"; fi + # Inject logs from backup copy if LOGS_BACKUP_SOURCE_DIR is set + - if [ -d "$LOGS_BACKUP_SOURCE_DIR" ]; then + - cp -r $LOGS_BACKUP_SOURCE_DIR/logs . + - fi - # Inject logs from backup copy if LOGS_BACKUP_SOURCE_DIR is set - if [ -d "$LOGS_BACKUP_SOURCE_DIR" ]; then - cp -r $LOGS_BACKUP_SOURCE_DIR/logs . - fi + # handle case where the job is running for the first time. e.g where the job was not found. 
+ - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) + - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - # handle case where the job is running for the first time. e.g where the job was not found. - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - - if [ "$id_previous" != "-1" ]; then + - if [ "$id_previous" != "-1" ]; then # Unzip artifacts to recover past logs dir - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip - else + - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip + - else # create logs dir if it doesn't exist - mkdir logs - fi + - mkdir logs + - fi - # create folder for today's results - TODAY=$(date +'%Y-%m-%d') - mkdir -p logs/$TODAY - - # Aggregate job logs - job_names="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" - for job_name in $job_names; do - echo "Getting job logs for: $job_name" - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip -d previous_artifacts - # This wildcard thingy relies on only one csv file being present per job - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY - rm artifacts.zip - rm -r previous_artifacts - done - ls logs - exit 0 + # create folder for today's results + - TODAY=$(date +'%Y-%m-%d') + - mkdir -p logs/$TODAY + + # Aggregate job logs + - job_names="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" + - for job_name in $job_names; do + - echo "Getting job logs for: $job_name" + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) + - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" + - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d previous_artifacts + - mv previous_artifacts/mld--"$job_name"-$id_previous--sha-*.csv logs/$TODAY + - rm artifacts.zip + - rm -r previous_artifacts + - done + - ls logs + - exit 0 artifacts: name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" @@ -1741,34 +1738,31 @@ backup-long-term-job-logs: timeout: "25 minutes" script: - !reference [ .job-linux, before_script ] - - | - set -euxo pipefail - echo "Running on RUNNER_TAG: $RUNNER_TAG" - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash 
"${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh - - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job name from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - - if [ "$id_previous" != "-1" ]; then + - set -euxo pipefail + - echo "Running on RUNNER_TAG: $RUNNER_TAG" + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh + + # Find ID of last run of ivas-long-term-job-logs + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-long-term-job-logs $CI_PROJECT_ID) + + - if [ "$id_previous" != "-1" ]; then # Unzip artifacts to recover past logs dir - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - unzip artifacts.zip - else + - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip + - else # create logs dir if it doesn't exist - mkdir logs - fi - - # Store logs as backup copy if LOGS_BACKUP_TARGET_DIR is set - if [ -d "$LOGS_BACKUP_TARGET_DIR" ]; then - cp -r logs $LOGS_BACKUP_TARGET_DIR/logs - exit 0; - else - echo "Error: LOGS_BACKUP_TARGET_DIR not set for backup job!" - exit -1; - fi + - mkdir logs + - fi - + # Store logs as backup copy if LOGS_BACKUP_TARGET_DIR is set + - if [ -d "$LOGS_BACKUP_TARGET_DIR" ]; then + - cp -r logs $LOGS_BACKUP_TARGET_DIR + - exit 0; + - else + - echo "Error: LOGS_BACKUP_TARGET_DIR not set for backup job!" + - exit -1; + - fi # --------------------------------------------------------------- -- GitLab From 38c9d7ebf050fd5b3a340a4b493e240a8d094691 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sun, 13 Jul 2025 16:39:22 +0200 Subject: [PATCH 46/50] Change example backup folder in comment --- .gitlab-ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index da736bfb4..890c6588e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1657,10 +1657,10 @@ voip-be-on-merge-request: - python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py # To inject backup copy in manual trigger: -# - Store logs backup in accessible folder, e.g. /tmp/ivas +# - Store logs backup in accessible folder, e.g. /usr/local/backup # - Set MANUAL_PIPELINE_TRIGGER to long-term-logs # - Set RUNNER_TAG to specific runner with this folder prepared, e.g. test-ericsson-linux-runner-5 -# - Set LOGS_BACKUP_SOURCE_DIR to source folder, e.g. /tmp/ivas +# - Set LOGS_BACKUP_SOURCE_DIR to source folder, e.g. /usr/local/backup ivas-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" @@ -1724,10 +1724,10 @@ ivas-long-term-job-logs: expose_as: "ivas long term job logs results" # To store backup copy: -# - Prepare accessible folder for backup, e.g. /tmp/ivas +# - Prepare accessible folder for backup, e.g. /usr/local/backup # - Set MANUAL_PIPELINE_TRIGGER to backup-long-term-logs # - Set RUNNER_TAG to specific runner with this folder prepared, e.g. test-ericsson-linux-runner-5 -# - Set LOGS_BACKUP_TARGET_DIR to source folder, e.g. /tmp/ivas +# - Set LOGS_BACKUP_TARGET_DIR to source folder, e.g. 
/usr/local/backup backup-long-term-job-logs: rules: - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "backup-long-term-logs" -- GitLab From ee10b8d473b9c6f8ea6ec2eb723ae1660d2f60f2 Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sun, 13 Jul 2025 16:52:20 +0200 Subject: [PATCH 47/50] Try to fix syntax errors --- .gitlab-ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 890c6588e..c44beb080 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1672,7 +1672,7 @@ ivas-long-term-job-logs: script: - !reference [ .job-linux, before_script ] - set -euxo pipefail - - echo "Running on RUNNER_TAG: $RUNNER_TAG" + - echo "Running on RUNNER_TAG = $RUNNER_TAG" - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh @@ -1685,7 +1685,7 @@ ivas-long-term-job-logs: # handle case where the job is running for the first time. e.g where the job was not found. - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - - echo "Job ID from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" + - echo "Job name from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" - if [ "$id_previous" != "-1" ]; then # Unzip artifacts to recover past logs dir @@ -1703,7 +1703,7 @@ ivas-long-term-job-logs: # Aggregate job logs - job_names="ivas-pytest-compare_ref-long-enc ivas-pytest-compare_ref-long-enc-lev+10 ivas-pytest-compare_ref-long-enc-lev-10 ivas-pytest-compare_ref-long-dec ivas-pytest-compare_ref-long-dec-lev+10 ivas-pytest-compare_ref-long-dec-lev-10 ivas-pytest-compare_ref-long-fx-fx ivas-pytest-compare_ref-long-fx-fx-lev+10 ivas-pytest-compare_ref-long-fx-fx-lev-10" - for job_name in $job_names; do - - echo "Getting job logs for: $job_name" + - echo "Getting job logs for $job_name" - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$job_name" $CI_PROJECT_ID) - echo "Job ID from variables - "$job_name", Job ID from script - $id_previous" - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip @@ -1739,7 +1739,7 @@ backup-long-term-job-logs: script: - !reference [ .job-linux, before_script ] - set -euxo pipefail - - echo "Running on RUNNER_TAG: $RUNNER_TAG" + - echo "Running on RUNNER_TAG = $RUNNER_TAG" - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh @@ -1760,7 +1760,7 @@ backup-long-term-job-logs: - cp -r logs $LOGS_BACKUP_TARGET_DIR - exit 0; - else - - echo "Error: LOGS_BACKUP_TARGET_DIR not set for backup job!" + - echo "Error - LOGS_BACKUP_TARGET_DIR not set for backup job!" 
- exit -1; - fi -- GitLab From 0b814f35b944b49e5efb621a32b560014ee4499b Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Sun, 13 Jul 2025 17:15:43 +0200 Subject: [PATCH 48/50] Use zip -o in case of overlap between new/injected logs --- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c44beb080..32a321bc6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1690,7 +1690,8 @@ ivas-long-term-job-logs: - if [ "$id_previous" != "-1" ]; then # Unzip artifacts to recover past logs dir - curl --request GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip - - unzip artifacts.zip + # If there is overlap between injected log and new log, use the new logs. + - unzip -o artifacts.zip - else # create logs dir if it doesn't exist - mkdir logs -- GitLab From 5bd99fde185648dfd6320cfc0a6a622de50af72f Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Mon, 14 Jul 2025 14:25:19 +0200 Subject: [PATCH 49/50] Fix excluding build jobs for pages job --- .gitlab-ci.yml | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 32a321bc6..3b3957c70 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -845,9 +845,7 @@ clang-format-check: # ensure that codec builds on linux build-codec-linux-make: rules: - - if: $CI_JOB_NAME == 'pages' - when: never - - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' || $MANUAL_PIPELINE_TYPE == 'backup-long-term-logs' || $UPDATE_PAGES when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -866,9 +864,7 @@ build-codec-linux-make: # ensure that codec builds on linux with instrumentation active build-codec-linux-instrumented-make: rules: - - if: $CI_JOB_NAME == 'pages' - when: never - - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' || $MANUAL_PIPELINE_TYPE == 'backup-long-term-logs' || $UPDATE_PAGES when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -887,9 +883,7 @@ build-codec-linux-instrumented-make: build-codec-linux-debugging-make: rules: - - if: $CI_JOB_NAME == 'pages' - when: never - - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' || $MANUAL_PIPELINE_TYPE == 'backup-long-term-logs' || $UPDATE_PAGES when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -909,9 +903,7 @@ build-codec-linux-debugging-make: build-codec-windows-msbuild: rules: - - if: $CI_JOB_NAME == 'pages' - when: never - - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' + - if: $MANUAL_PIPELINE_TYPE == 'long-term-logs' || $MANUAL_PIPELINE_TYPE == 'backup-long-term-logs' || $UPDATE_PAGES when: never - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH -- GitLab From 027155f5e7083a465cb1950cf85f6f1d6c28ceba Mon Sep 17 00:00:00 2001 From: Erik Norvell Date: Mon, 14 Jul 2025 14:50:54 +0200 Subject: [PATCH 50/50] Fix for non-set LOGS_BACKUP_SOURCE_DIR --- .gitlab-ci.yml | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3b3957c70..1aea80f40 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1671,11 +1671,15 @@ ivas-long-term-job-logs: # Clean up logs - 
if [ -d "logs" ]; then rm -rf "logs"; fi # Inject logs from backup copy if LOGS_BACKUP_SOURCE_DIR is set - - if [ -d "$LOGS_BACKUP_SOURCE_DIR" ]; then - - cp -r $LOGS_BACKUP_SOURCE_DIR/logs . + - if [ "$LOGS_BACKUP_SOURCE_DIR" != "" ]; then + - cp_ret=0 + - cp -r $LOGS_BACKUP_SOURCE_DIR/logs ./ || cp_ret=$? + - if [ "$cp_ret" != 0 ]; then + - echo "Error -- Copying to $LOGS_BACKUP_SOURCE_DIR failed!" + - exit 1 + - fi - fi - # handle case where the job is running for the first time. e.g where the job was not found. - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH "$CI_JOB_NAME" $CI_PROJECT_ID) - echo "Job name from variables - "$CI_JOB_NAME", Job ID from script - $id_previous" @@ -1685,7 +1689,8 @@ ivas-long-term-job-logs: # If there is overlap between injected log and new log, use the new logs. - unzip -o artifacts.zip - else - # create logs dir if it doesn't exist + # create logs dir if it doesn't exist (should only happen in first run) + - echo "Could not find previous job, creating empty logs folder. If this is not the first run, an error likely happened!" - mkdir logs - fi @@ -1751,10 +1756,10 @@ backup-long-term-job-logs: # Store logs as backup copy if LOGS_BACKUP_TARGET_DIR is set - if [ -d "$LOGS_BACKUP_TARGET_DIR" ]; then - cp -r logs $LOGS_BACKUP_TARGET_DIR - - exit 0; + - exit 0 - else - echo "Error - LOGS_BACKUP_TARGET_DIR not set for backup job!" - - exit -1; + - exit 1 - fi -- GitLab