From 4f92351bada978249e9e3a67c876b0e46d03a109 Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Tue, 19 Nov 2024 08:20:24 +0100 Subject: [PATCH] add ci file from main --- .gitlab-ci.yml | 1533 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1533 insertions(+) create mode 100644 .gitlab-ci.yml diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 000000000..4843bd3bf --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,1533 @@ +variables: + TESTV_DIR: "/usr/local/testv" + LTV_DIR: "/usr/local/ltv" + EVS_BE_TEST_DIR_BASOP: "/usr/local/be_2_evs_basop" + REFERENCE_BRANCH: "ivas-float-update" + BUILD_OUTPUT: "build_output.txt" + SCRIPTS_DIR: "/usr/local/scripts" + EXIT_CODE_NON_BE: 123 + EXIT_CODE_FAIL: 1 + LONG_TEST_SUITE: "tests/codec_be_on_mr_nonselection tests/renderer --param_file scripts/config/self_test_ltv.prm --use_ltv" + LONG_TEST_SUITE_NO_RENDERER: "tests/codec_be_on_mr_nonselection --param_file scripts/config/self_test_ltv.prm --use_ltv" + SHORT_TEST_SUITE: "tests/codec_be_on_mr_nonselection" + TEST_SUITE: "" + DUT_ENCODER_PATH: "./IVAS_cod" + DUT_DECODER_PATH: "./IVAS_dec" + REF_ENCODER_PATH: "./IVAS_cod_ref" + REF_DECODER_PATH: "./IVAS_dec_ref" + LEVEL_SCALING: "1.0" + IVAS_PIPELINE_NAME: '' + BASOP_CI_BRANCH_PC_REPO: "basop-ci-branch" + PRM_FILES: "scripts/config/self_test.prm scripts/config/self_test_ltv.prm" + TESTCASE_TIMEOUT_STV: 900 + TESTCASE_TIMEOUT_LTV: 2400 + TESTCASE_TIMEOUT_STV_SANITIZERS: 1800 + CI_REGRESSION_THRESH_MLD: "0.1" + CI_REGRESSION_THRESH_MAX_ABS_DIFF: "50" + CI_REGRESSION_THRESH_SSNR: "-1" + CI_REGRESSION_THRESH_ODG: "-0.05" + GIT_CLEAN_FLAGS: -ffdxq + INSTR_DIR: "scripts/c-code_instrument" + MANUAL_PIPELINE_TYPE: + description: "Type for the manual pipeline run. Use 'pytest-compare' to run comparison test against reference float codec." 
+ value: 'default' + options: + - 'default' + - 'pytest-compare' + - 'pytest-compare-long' + - 'pytest-compare-to-input' + - 'pytest-saturation-smoke-test' + - 'evs-26444' + - 'sanitizer-stv' + - 'pytest-renderer' + - 'complexity' + - 'coverage' + - 'voip-be-test' + + +default: + interruptible: true # Make all jobs by default interruptible + +workflow: + name: '$IVAS_PIPELINE_NAME' + rules: + # see https://docs.gitlab.com/ee/ci/yaml/workflow.html#switch-between-branch-pipelines-and-merge-request-pipelines + - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push" + when: never + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + variables: + IVAS_PIPELINE_NAME: 'MR pipeline: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' + ### disabled for now because pipeline cd is redundant with MR pipeline with current workflow + # - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Pushes to main + # variables: + # IVAS_PIPELINE_NAME: 'Push pipeline: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'default' # for testing + variables: + IVAS_PIPELINE_NAME: 'Web run pipeline: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare' + variables: + IVAS_PIPELINE_NAME: 'Run comparison tools against float ref: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare-long' + variables: + IVAS_PIPELINE_NAME: 'Run comparison tools against float ref (long test vectors): $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare-to-input' + variables: + IVAS_PIPELINE_NAME: 'Run comparison tools against input (pass-through only): $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-saturation-smoke-test' + variables: + IVAS_PIPELINE_NAME: 'Run saturation smoke-test: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && 
$MANUAL_PIPELINE_TYPE == 'evs-26444' + variables: + IVAS_PIPELINE_NAME: 'EVS 26.444 test: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'sanitizer-stv' + variables: + IVAS_PIPELINE_NAME: 'Short testvectors sanitizers' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-renderer' + variables: + IVAS_PIPELINE_NAME: 'Renderer test: $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'complexity' + variables: + IVAS_PIPELINE_NAME: 'Complexity Measurement on $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'voip-be-test' + variables: + IVAS_PIPELINE_NAME: 'Voip BE test on $CI_COMMIT_BRANCH' + - if: $CI_PIPELINE_SOURCE == 'schedule' # Scheduled in any branch + variables: + IVAS_PIPELINE_NAME: 'Scheduled pipeline: $CI_COMMIT_BRANCH' + + +stages: + - .pre + - prevalidate + - build + - test + - deploy + +# --------------------------------------------------------------- +# Generic script anchors +# --------------------------------------------------------------- + +# These can be used later on to do common tasks + +# Prints useful information for every job and should be used at the beginning of each job +.print-common-info: &print-common-info + - | + echo "Printing common information for build job." + echo "Current job is run on commit $CI_COMMIT_SHA" + echo "Commit time was $CI_COMMIT_TIMESTAMP" + date | xargs echo "System time is" + +.build-reference-binaries: &build-reference-binaries +- current_commit_sha=$(git rev-parse HEAD) +### build reference binaries +- git checkout $REFERENCE_BRANCH +- git pull +- make clean +- make -j +- mv ./IVAS_cod ./$REF_ENCODER_PATH +- mv ./IVAS_dec ./$REF_DECODER_PATH +- mv ./IVAS_rend ./IVAS_rend_ref +### Return to current branch +- git restore . 
+- git checkout $current_commit_sha + + +.build-reference-and-dut-binaries: &build-reference-and-dut-binaries +### build reference binaries + - *build-reference-binaries +### build dut binaries + - make clean + - make -j + +.build-and-create-reference-outputs: &build-and-create-reference-outputs + - *build-reference-and-dut-binaries + + ### prepare pytest + # create short test vectors + - python3 tests/create_short_testvectors.py + # create references + - exit_code=0 + - python3 -m pytest $TEST_SUITE -v --update_ref 1 --create_ref -n auto --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH || exit_code=$? + +.update-scripts-repo: &update-scripts-repo + - cd $SCRIPTS_DIR + - sed -i '/fetch/d' .git/config # Remove all fetch lines to clean out dead links + - git remote set-branches --add origin $BASOP_CI_BRANCH_PC_REPO # Add currently used branch + - git fetch + - git restore . # Just as a precaution + - git checkout $BASOP_CI_BRANCH_PC_REPO + - git pull + - cd - + - cp -r $SCRIPTS_DIR/ci . + - cp -r $SCRIPTS_DIR/scripts . + - cp -r $SCRIPTS_DIR/tests . + - cp $SCRIPTS_DIR/pytest.ini . 
+ +.apply-testv-scaling: &apply-testv-scaling + - echo "Applying level scaling in scripts/testv using scale=$LEVEL_SCALING" + - tests/scale_pcm.py ./scripts/testv/ $LEVEL_SCALING + +.update-ltv-repo: &update-ltv-repo + - cd $LTV_DIR + - git pull + - cd - + +.copy-ltv-files-to-testv-dir: ©-ltv-files-to-testv-dir + - cp "$LTV_DIR"/*.wav scripts/testv/ + - cp "$LTV_DIR"/*.met scripts/testv/ + - cp "$LTV_DIR"/*.csv scripts/testv/ + +.activate-Werror-linux: &activate-Werror-linux + - sed -i.bak "s/^# \(CFLAGS += -Werror\)/\1/" Makefile + +.rules-pytest-to-ref-short: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare" + - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'schedule' + when: never + +.rules-pytest-to-input-short: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare-to-input" + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'schedule' + when: never + +# TODO: only temporary as long the MR encoder tests should not compare to main +.rules-pytest-to-ref-enc-short-temp: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare" + - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'schedule' + when: never + +.rules-pytest-to-main-short: + rules: + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + +.rules-pytest-long: + rules: + - if: $PYTEST_MLD_LONG # Set by scheduled pipeline + - if: $CI_PIPELINE_SOURCE == 'web' && 
$MANUAL_PIPELINE_TYPE == "pytest-compare-long" + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + when: never + +.rules-coverage: + rules: + - if: $COVERAGE_TEST # Set by scheduled pipeline + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "coverage" + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + when: never + +.rules-pytest-saturation-smoke-test: + rules: + - if: $PYTEST_SMOKE_TEST # Set by scheduled pipeline + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-saturation-smoke-test" + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + when: never + +# --------------------------------------------------------------- +# Job templates +# --------------------------------------------------------------- + +# templates to define stages and platforms +.test-job-linux: + tags: + - ivas-basop-linux + +.build-job-linux: + stage: build + timeout: "2 minutes" + needs: [] + tags: + - ivas-basop-linux + +# template for test jobs on linux that need the TESTV_DIR +.test-job-linux-needs-testv-dir: + extends: .test-job-linux + before_script: + - *update-scripts-repo + - if [ ! 
-d "$TESTV_DIR" ]; then mkdir -p $TESTV_DIR; fi + - cp -r scripts/testv/* $TESTV_DIR/ + +.ivas-pytest-anchor: &ivas-pytest-anchor + stage: test + needs: ["build-codec-linux-make"] + timeout: "240 minutes" + variables: + # keep "mld" in artifact name for backwards compatibility reasons + CSV_ARTIFACT_NAME: "mld--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv" + MERGED_CSV_ARTIFACT_NAME: "$CI_JOB_NAME--merged_csv--$CI_JOB_ID.csv" + PAGES_HTML_ARTIFACT_NAME: "$CI_JOB_NAME-index.html" + SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html" + IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME" + script: + - *print-common-info + - *update-scripts-repo + - if [ $USE_LTV -eq 1 ]; then + - *update-ltv-repo + - *copy-ltv-files-to-testv-dir + - testcase_timeout=$TESTCASE_TIMEOUT_LTV + - else + - testcase_timeout=$TESTCASE_TIMEOUT_STV + - fi + + - python3 ci/remove_unsupported_testcases.py $PRM_FILES + - if [ $LEVEL_SCALING != "1.0" ];then + - *apply-testv-scaling + - fi + - *build-and-create-reference-outputs + + ### run pytest + - exit_code=0 + - python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report.html --self-contained-html --junit-xml=report-junit.xml --mld --ssnr --odg -n auto --testcase_timeout $testcase_timeout --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH || exit_code=$? 
+ - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true + + - python3 scripts/parse_xml_report.py report-junit.xml $CSV_ARTIFACT_NAME + - mkdir $IMAGES_ARTIFACT_NAME + - for MEASURE in MLD DIFF SSNR ODG;do python3 scripts/create_histogram_summary.py $CSV_ARTIFACT_NAME $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".csv $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".png --measure $MEASURE; done + - python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME + + - if [ $USE_LTV -eq 1 ] && [ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]; then + - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH $CI_JOB_NAME $CI_PROJECT_ID) + - echo "Job ID from variables - $CI_JOB_ID, Job ID from script - $id_previous" + - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip + - unzip artifacts.zip -d previous_artifacts + # This wildcard thingy relies on only one csv file being present per job + - file_previous="previous_artifacts/mld--$CI_JOB_NAME-$id_previous--sha-*.csv" + - python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME + - else + # create empty file for artifacts to avoid errors + - touch $PAGES_HTML_ARTIFACT_NAME + - touch $MERGED_CSV_ARTIFACT_NAME + - fi + + - if [ $zero_errors != 1 ]; then + - echo "Run errors encountered!" 
+ # TODO: temporary only to not fail MR pipelines on crashes + - if [ $CI_PIPELINE_SOURCE == 'merge_request_event' ]; then + - exit $EXIT_CODE_NON_BE + - else + - exit $EXIT_CODE_FAIL + - fi + - fi + - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_NON_BE; fi + - exit 0 + + allow_failure: + exit_codes: + - 123 + artifacts: + name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" + expire_in: 1 week + when: always + paths: + - report-junit.xml + - report.html + - $PAGES_HTML_ARTIFACT_NAME + - $CSV_ARTIFACT_NAME + - $MERGED_CSV_ARTIFACT_NAME + - $SUMMARY_HTML_ARTIFACT_NAME + - $IMAGES_ARTIFACT_NAME + expose_as: "pytest compare results" + reports: + junit: + - report-junit.xml + +.ivas-pytest-on-merge-request-anchor: &ivas-pytest-on-merge-request-anchor + stage: test + needs: ["build-codec-linux-make"] + timeout: "300 minutes" + variables: + XML_REPORT_BRANCH: "report-junit-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.xml" + XML_REPORT_MAIN: "report-junit-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.xml" + HTML_REPORT_BRANCH: "report-junit-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.html" + HTML_REPORT_MAIN: "report-junit-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.html" + CSV_BRANCH: "scores-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.csv" + CSV_MAIN: "scores-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.csv" + IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME" + SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html" + script: + - *print-common-info + - *update-scripts-repo + - if [ $USE_LTV -eq 1 ]; then + - *update-ltv-repo + - *copy-ltv-files-to-testv-dir + - testcase_timeout=$TESTCASE_TIMEOUT_LTV + - else + - testcase_timeout=$TESTCASE_TIMEOUT_STV + - fi + + - python3 ci/remove_unsupported_testcases.py $PRM_FILES + - if [ $LEVEL_SCALING != "1.0" ];then + - *apply-testv-scaling + - fi + + # check MR title for flag that allows regressions to be mergable + - echo $CI_MERGE_REQUEST_TITLE > tmp.txt + - allow_regressions_flag=$(grep -c 
--ignore-case "\[allow[ -]*regression\]" tmp.txt) || true + + ### run branch first + # this per default builds the branch and the reference and creates the reference outputs + - *build-and-create-reference-outputs + - exit_code=0 + - python3 -m pytest --tb=no $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_BRANCH --self-contained-html --junit-xml=$XML_REPORT_BRANCH --mld --ssnr --odg --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || exit_code=$? + - zero_errors_branch=$(cat $XML_REPORT_BRANCH | grep -c 'errors="0"') || true + - python3 scripts/parse_xml_report.py $XML_REPORT_BRANCH $CSV_BRANCH + + # Store branch outputs for comparison + - mv tests/dut tests/dut_branch + + # create the summary based on the branch + - mkdir $IMAGES_ARTIFACT_NAME + - for MEASURE in MLD DIFF SSNR ODG;do python3 scripts/create_histogram_summary.py $CSV_BRANCH $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".csv $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".png --measure $MEASURE; done + - python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME + + ### run main now + - git checkout $CI_MERGE_REQUEST_TARGET_BRANCH_NAME + - git pull + - make clean + - make -j + - python3 -m pytest --tb=no $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_MAIN --self-contained-html --junit-xml=$XML_REPORT_MAIN --mld --ssnr --odg --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true + - python3 scripts/parse_xml_report.py $XML_REPORT_MAIN $CSV_MAIN + + # If outputs of main and branch are equal, have equal reports and no run errors were encountered, the job will pass. 
+ - diff_sba=0 + - diff_param=0 + - diff_report=0 + - python3 scripts/batch_comp_audio.py --tool pyaudio3dtools -sd tests/dut/sba_bs/raw tests/dut_branch/sba_bs/raw || diff_sba=$? + - python3 scripts/batch_comp_audio.py --tool pyaudio3dtools -sd tests/dut/param_file/dec tests/dut_branch/param_file/dec || diff_param=$? + - diff $CSV_BRANCH $CSV_MAIN || diff_report=$? + - if [ $diff_param -eq 0 ] && [ $diff_sba -eq 0 ] && [ $diff_report -eq 0 ] && [ $zero_errors_branch -eq 1 ]; then + - echo "Output BE to main, identical report and no run errors encountered." + # Add dummy files to avoid warning on missing artifacts + - touch changes_crashes.csv + - touch changes_MLD.csv + - touch changes_MAXIMUM_ABS_DIFF.csv + - touch changes_MIN_SSNR.csv + - touch changes_MIN_ODG.csv + - exit 0; + - fi + + ### compare the two csv files for regressions + - regressions_found=0 + - python3 scripts/basop_check_for_changes_in_testcases.py $CSV_BRANCH $CSV_MAIN || regressions_found=$? + + - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit_code=$EXIT_CODE_NON_BE; fi + - if [ $zero_errors_branch != 1 ]; then echo "Run errors encountered!"; exit_code=$EXIT_CODE_NON_BE; fi + - if [ $regressions_found != 0 ]; then + - if [ $allow_regressions_flag == 0 ]; then + - echo "Detected regression wrt to main, [allow regression] not set!" + - exit_code=$EXIT_CODE_FAIL; + - else + - echo "Detected regression wrt to main, [allow regression] set." 
+ - exit_code=$EXIT_CODE_NON_BE; + - fi + - fi + + - exit $exit_code + after_script: + - rm -rf tests/dut tests/ref + allow_failure: + exit_codes: + - 123 + artifacts: + name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" + expire_in: 1 week + when: always + paths: + - $XML_REPORT_BRANCH + - $XML_REPORT_MAIN + - $HTML_REPORT_BRANCH + - $HTML_REPORT_MAIN + - $CSV_BRANCH + - $CSV_MAIN + - $SUMMARY_HTML_ARTIFACT_NAME + - $IMAGES_ARTIFACT_NAME + - changes_crashes.csv + - changes_MLD.csv + - changes_MAXIMUM_ABS_DIFF.csv + - changes_MIN_SSNR.csv + - changes_MIN_ODG.csv + expose_as: "pytest compare results" + reports: + junit: + - $XML_REPORT_BRANCH + - $XML_REPORT_MAIN + + +.ivas-pytest-sanitizers-anchor: &ivas-pytest-sanitizers-anchor + stage: test + needs: ["build-codec-linux-make"] + timeout: "90 minutes" + rules: + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' + when: never + - if: $CI_PIPELINE_SOURCE == 'schedule' && $IVAS_PYTEST_MSAN + - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "sanitizer-stv" + script: + - *print-common-info + - *update-scripts-repo + - python3 ci/remove_unsupported_testcases.py $PRM_FILES + - *build-reference-and-dut-binaries + - make clean + - make -j CLANG=$CLANG_NUM + - if [[ $CLANG_NUM == 3 ]]; then export UBSAN_OPTIONS="suppressions=scripts/ubsan.supp,report_error_type=1"; fi + - testcase_timeout=$TESTCASE_TIMEOUT_STV_SANITIZERS + - python3 -m pytest $SHORT_TEST_SUITE -v --tb=no --update_ref 1 --html=report.html --self-contained-html --junit-xml=report-junit.xml --testcase_timeout $testcase_timeout --ref_encoder_path $DUT_ENCODER_PATH --ref_decoder_path $DUT_DECODER_PATH + artifacts: + name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" + when: always + expire_in: "2 weeks" + paths: + - report-junit.xml + - report.html + reports: + junit: + - report-junit.xml + +.ivas-pytest-compare-to-input-anchor: &ivas-pytest-compare-to-input-anchor + stage: test + needs: 
["build-codec-linux-make"] + timeout: "240 minutes" + variables: + SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html" + IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME" + script: + - *print-common-info + - *update-scripts-repo + - if [ $USE_LTV -eq 1 ]; then + - *update-ltv-repo + - *copy-ltv-files-to-testv-dir + - testcase_timeout=$TESTCASE_TIMEOUT_LTV + - else + - testcase_timeout=$TESTCASE_TIMEOUT_STV + - fi + + - python3 ci/remove_unsupported_testcases.py $PRM_FILES + - if [ $LEVEL_SCALING != "1.0" ];then + - *apply-testv-scaling + - fi + - *build-reference-and-dut-binaries + + ### run pytest + - exit_code=0 + - python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report-ref.html --self-contained-html --junit-xml=report-junit-ref.xml --mld --ssnr --odg -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $REF_ENCODER_PATH --dut_decoder_path $REF_DECODER_PATH --compare_to_input || exit_code=$? + - python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report-dut.html --self-contained-html --junit-xml=report-junit-dut.xml --mld --ssnr --odg -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH --compare_to_input || exit_code=$? 
+ - zero_errors_ref=$(cat report-junit-ref.xml | grep -c 'errors="0"') || true + - zero_errors_dut=$(cat report-junit-dut.xml | grep -c 'errors="0"') || true + - python3 scripts/parse_xml_report.py report-junit-ref.xml report-ref.csv + - python3 scripts/parse_xml_report.py report-junit-dut.xml report-dut.csv + - python3 scripts/diff_report.py report-ref.csv report-dut.csv report-diff.csv + + # create summary + - mkdir $IMAGES_ARTIFACT_NAME + - for MEASURE in MLD DIFF SSNR ODG;do python3 scripts/create_histogram_summary.py report-diff.csv $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".csv $IMAGES_ARTIFACT_NAME/summary_"$MEASURE".png --measure $MEASURE --diff; done + - python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME + + - exit 0 + + allow_failure: + exit_codes: + - 123 + artifacts: + name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results" + expire_in: 1 week + when: always + paths: + - report-junit-ref.xml + - report-ref.html + - report-ref.csv + - report-junit-dut.xml + - report-dut.html + - report-dut.csv + - report-diff.csv + - $IMAGES_ARTIFACT_NAME + - $SUMMARY_HTML_ARTIFACT_NAME + expose_as: "pytest compare to input results" + reports: + junit: + - report-junit-ref.xml + - report-junit-dut.xml + +# --------------------------------------------------------------- +# .pre jobs for setting up things +# --------------------------------------------------------------- + +# See: https://gitlab.com/gitlab-org/gitlab/-/issues/194023 +# Solution to make main branch pipelines uninterruptible while all other +# pipelines can be interrupted by default. This works because all jobs +# after uninterruptible jobs will be uninterruptible. Resource group +# setting avoids rare case where two fast merges could still interrupt +# pipeline. This should be revisited if there are updates to Gitlab. 
+uninterruptible: + stage: .pre + interruptible: false + resource_group: uninterruptible + script: + - echo "$CI_COMMIT_BRANCH is uninterruptible" + rules: + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + when: always + tags: + - ivas-basop-linux + +# --------------------------------------------------------------- +# verification jobs +# --------------------------------------------------------------- + +clang-format-check: + extends: + - .test-job-linux + rules: + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + - if: $CI_PIPELINE_SOURCE == 'schedule' + when: never + variables: + ARTIFACT_BASE_NAME: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--formatting-fix" + stage: prevalidate + needs: [] + timeout: "5 minutes" + script: + - *update-scripts-repo + # Set up variables. This can't be done in the "variables" section because variables are not expanded properly there + - PATCH_FILE_NAME="$ARTIFACT_BASE_NAME".patch + - > + INSTRUCTIONS_GITLAB="To fix formatting issues:\n + - download the diff patch available as artifact of this job\n + - unzip the artifact and place the patch file in the root directory of your local IVAS repo\n + - run: git apply $PATCH_FILE_NAME\n + - commit new changes" + - > + INSTRUCTIONS_README="To fix formatting issues:\n + - place the patch file in the root directory of your local IVAS repo\n + - run: git apply $PATCH_FILE_NAME\n + - commit new changes" + + - format_problems=0 + - scripts/check-format.sh -afD -p 8 || format_problems=$? + - if [ $format_problems == 0 ] ; then exit 0; fi + + - mkdir tmp-formatting-fix + - git diff > "tmp-formatting-fix/$PATCH_FILE_NAME" + + # Print instructions to job output + - echo -e "$INSTRUCTIONS_GITLAB" + + # Include readme in the artifact, in case someone misses the job printout (e.g. 
getting the artifact via MR interface) + - echo -e "$INSTRUCTIONS_README" > "tmp-formatting-fix/readme.txt" + + - exit $format_problems + artifacts: + expire_in: 1 day + paths: + - tmp-formatting-fix/ + when: on_failure + name: "$ARTIFACT_BASE_NAME" + expose_as: "formatting patch" + +# --------------------------------------------------------------- +# Build jobs +# --------------------------------------------------------------- + +# ensure that codec builds on linux +build-codec-linux-make: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' + - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs + - if: $CI_PIPELINE_SOURCE == 'schedule' + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + extends: + - .build-job-linux + script: + - *print-common-info + - *activate-Werror-linux + - make -j + +# ensure that codec builds on linux with instrumentation active +build-codec-linux-instrumented-make: + rules: + - if: $CI_PIPELINE_SOURCE == 'web' + - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main + - if: $CI_PIPELINE_SOURCE == 'schedule' + - if: $CI_PIPELINE_SOURCE == 'push' + when: never + extends: + - .build-job-linux + timeout: "7 minutes" + script: + - *print-common-info + - *update-scripts-repo + - bash scripts/prepare_instrumentation.sh -m MEM_ONLY -p BASOP + - make -j -C $INSTR_DIR + +# --------------------------------------------------------------- +# Short test jobs that run in merge request pipelines +# --------------------------------------------------------------- + +### jobs that test fx encoder -> flt decoder +# TODO: reenable once encoder tests shall compare to main +# ivas-pytest-compare_to_main-short-enc: +# extends: +# - .rules-pytest-to-main-short +# - .test-job-linux +# 
before_script: +# - USE_LTV=0 +# - DUT_DECODER_PATH=./IVAS_dec_ref +# - TEST_SUITE="$SHORT_TEST_SUITE" +# - LEVEL_SCALING=1.0 +# <<: *ivas-pytest-on-merge-request-anchor + +# ivas-pytest-compare_to_main-short-enc-lev-10: +# extends: +# - .rules-pytest-to-main-short +# - .test-job-linux +# before_script: +# - USE_LTV=0 +# - DUT_DECODER_PATH=./IVAS_dec_ref +# - TEST_SUITE="$SHORT_TEST_SUITE" +# - LEVEL_SCALING=0.3162 +# <<: *ivas-pytest-on-merge-request-anchor + +# ivas-pytest-compare_to_main-short-enc-lev+10: +# extends: +# - .rules-pytest-to-main-short +# - .test-job-linux +# before_script: +# - USE_LTV=0 +# - DUT_DECODER_PATH=./IVAS_dec_ref +# - TEST_SUITE="$SHORT_TEST_SUITE" +# - LEVEL_SCALING=3.162 +# <<: *ivas-pytest-on-merge-request-anchor + +### jobs that test flt encoder -> fx decoder +ivas-pytest-compare_to_main-short-dec: + extends: + - .rules-pytest-to-main-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./IVAS_cod_ref + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=1.0 + - rm -rf tests/dut tests/ref + <<: *ivas-pytest-on-merge-request-anchor + +ivas-pytest-compare_to_main-short-dec-lev-10: + extends: + - .rules-pytest-to-main-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./IVAS_cod_ref + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=0.3162 + - rm -rf tests/dut tests/ref + <<: *ivas-pytest-on-merge-request-anchor + +ivas-pytest-compare_to_main-short-dec-lev+10: + extends: + - .rules-pytest-to-main-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./IVAS_cod_ref + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=3.162 + - rm -rf tests/dut tests/ref + <<: *ivas-pytest-on-merge-request-anchor + +# --------------------------------------------------------------- +# Short test jobs for running from web interface or schedule +# --------------------------------------------------------------- + +### jobs that test fx encoder -> flt decoder 
+ivas-pytest-compare_to_ref-short-enc: + extends: + #- .rules-pytest-to-ref-short + - .rules-pytest-to-ref-enc-short-temp + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-anchor + +ivas-pytest-compare_to_ref-short-enc-lev-10: + extends: + #- .rules-pytest-to-ref-short + - .rules-pytest-to-ref-enc-short-temp + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=0.3162 + <<: *ivas-pytest-anchor + +ivas-pytest-compare_to_ref-short-enc-lev+10: + extends: + #- .rules-pytest-to-ref-short + - .rules-pytest-to-ref-enc-short-temp + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=3.162 + <<: *ivas-pytest-anchor + +ivas-pytest-enc-msan: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=1 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +ivas-pytest-enc-asan: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=2 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +ivas-pytest-enc-usan: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=3 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +### jobs that test flt encoder -> fx decoder +ivas-pytest-compare_to_ref-short-dec: + extends: + - .rules-pytest-to-ref-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-anchor + +ivas-pytest-compare_to_ref-short-dec-lev-10: + extends: + - .rules-pytest-to-ref-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=0.3162 + <<: *ivas-pytest-anchor + 
+ivas-pytest-compare_to_ref-short-dec-lev+10: + extends: + - .rules-pytest-to-ref-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=3.162 + <<: *ivas-pytest-anchor + +ivas-pytest-compare-to-input-short-dec: + extends: + - .rules-pytest-to-input-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-compare-to-input-anchor + +ivas-pytest-compare-to-input-short-enc: + extends: + - .rules-pytest-to-input-short + - .test-job-linux + before_script: + - USE_LTV=0 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$SHORT_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-compare-to-input-anchor + +ivas-pytest-dec-msan-short: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=1 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +ivas-pytest-dec-asan-short: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=2 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +ivas-pytest-dec-usan-short: + extends: + - .test-job-linux + before_script: + - CLANG_NUM=3 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + <<: *ivas-pytest-sanitizers-anchor + +# --------------------------------------------------------------- +# Long test jobs +# --------------------------------------------------------------- + +ivas-pytest-compare_ref-long-enc: + extends: + - .rules-pytest-long + - .test-job-linux + before_script: + - USE_LTV=1 + - DUT_DECODER_PATH=./$REF_DECODER_PATH + - TEST_SUITE="$LONG_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-anchor + +ivas-pytest-compare_ref-long-dec: + extends: + - .rules-pytest-long + - .test-job-linux + before_script: + - USE_LTV=1 + - DUT_ENCODER_PATH=./$REF_ENCODER_PATH + - TEST_SUITE="$LONG_TEST_SUITE" + - LEVEL_SCALING=1.0 + <<: *ivas-pytest-anchor + 
+# Long-test-vector comparison jobs at shifted input levels.
+# NOTE(review): 0.3162 / 3.162 correspond to -10 dB / +10 dB scaling,
+# matching the "-lev-10" / "-lev+10" job-name suffixes.
+ivas-pytest-compare_ref-long-enc-lev-10:
+  extends:
+    - .rules-pytest-long
+    - .test-job-linux
+  before_script:
+    - USE_LTV=1
+    - DUT_DECODER_PATH=./$REF_DECODER_PATH
+    - TEST_SUITE="$LONG_TEST_SUITE"
+    - LEVEL_SCALING=0.3162
+  <<: *ivas-pytest-anchor
+
+ivas-pytest-compare_ref-long-dec-lev-10:
+  extends:
+    - .rules-pytest-long
+    - .test-job-linux
+  before_script:
+    - USE_LTV=1
+    - DUT_ENCODER_PATH=./$REF_ENCODER_PATH
+    - TEST_SUITE="$LONG_TEST_SUITE"
+    - LEVEL_SCALING=0.3162
+  <<: *ivas-pytest-anchor
+
+ivas-pytest-compare_ref-long-enc-lev+10:
+  extends:
+    - .rules-pytest-long
+    - .test-job-linux
+  before_script:
+    - USE_LTV=1
+    - DUT_DECODER_PATH=./$REF_DECODER_PATH
+    - TEST_SUITE="$LONG_TEST_SUITE"
+    - LEVEL_SCALING=3.162
+  <<: *ivas-pytest-anchor
+
+ivas-pytest-compare_ref-long-dec-lev+10:
+  extends:
+    - .rules-pytest-long
+    - .test-job-linux
+  before_script:
+    - USE_LTV=1
+    - DUT_ENCODER_PATH=./$REF_ENCODER_PATH
+    - TEST_SUITE="$LONG_TEST_SUITE"
+    - LEVEL_SCALING=3.162
+  <<: *ivas-pytest-anchor
+
+# Smoke test with extreme input scaling (32768 = 2^15) — presumably meant to
+# drive signals into saturation; confirm against .rules-pytest-saturation-smoke-test.
+ivas-smoke-test-saturation:
+  extends:
+    - .rules-pytest-saturation-smoke-test
+    - .test-job-linux-needs-testv-dir
+  script:
+    - USE_LTV=1
+    - LEVEL_SCALING=32768
+
+    - *print-common-info
+    - *update-scripts-repo
+    # GitLab concatenates script items into one shell script, so this
+    # if/fi spread over several list items is a single valid construct.
+    - if [ $USE_LTV -eq 1 ]; then
+    - *update-ltv-repo
+    - *copy-ltv-files-to-testv-dir
+    - fi
+    - if [ $LEVEL_SCALING != "1.0" ];then
+    - *apply-testv-scaling
+    - fi
+    - cp -r scripts/testv/* $TESTV_DIR/
+
+    # skip prepare_mem_dryrun.py script in smoke_test.sh
+    - sed -i 's/python3 .\/scripts\/prepare_mem_dryrun.py/#python3 .\/scripts\/prepare_mem_dryrun.py/g' ci/smoke_test.sh
+
+    - bash ci/smoke_test.sh
+    ### analyze for failures
+    # an empty or missing output file means the smoke test itself broke
+    - if ! [ -s smoke_test_output.txt ] || ! [ -s smoke_test_output_plc.txt ] || ! [ -s smoke_test_output_jbm_noEXT.txt ] || ! [ -s smoke_test_output_hrtf.txt ]; then echo "Error in smoke test"; exit 1; fi
+    - ret_val=0
+    # grep -c succeeds (exit 0) when "failed" occurs at least once
+    - if cat smoke_test_output.txt | grep -c "failed" ; then echo "Smoke test without PLC failed"; ret_val=1; fi
+    - if cat smoke_test_output_plc.txt | grep -c "failed"; then echo "Smoke test with PLC failed"; ret_val=1; fi
+    - if cat smoke_test_output_jbm_noEXT.txt | grep -c "failed"; then echo "Smoke test JBM part failed"; ret_val=1; fi
+    - if cat smoke_test_output_hrtf.txt | grep -c "failed"; then echo "Smoke test with external hrtf files failed"; ret_val=1; fi
+    - exit $ret_val
+  artifacts:
+    name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
+    expire_in: 1 week
+    when: always
+    paths:
+      - smoke_test_output.txt
+      - smoke_test_output_plc.txt
+      - smoke_test_output_jbm_noEXT.txt
+      - smoke_test_output_hrtf.txt
+    expose_as: "saturation smoke test results"
+
+
+# GCOV/LCOV coverage analysis of self_test suite
+coverage-test-on-main-scheduled:
+  extends:
+    - .test-job-linux
+    - .rules-coverage
+  stage: test
+  timeout: 3 hours
+  script:
+    - *print-common-info
+    - *update-scripts-repo
+    - *update-ltv-repo
+    - *copy-ltv-files-to-testv-dir
+    - *build-reference-binaries
+    # Build DuT binaries with GCOV
+    - make clean
+    - make GCOV=1 -j
+    - cp IVAS_rend IVAS_rend_ref # Copy to ensure instrumented renderer is run in the first pytest call
+
+    - testcase_timeout=$TESTCASE_TIMEOUT_LTV
+    - exit_code_dec=0
+    - exit_code_enc=0
+    # first pass: the instrumented decoder is passed as the "ref" decoder,
+    # collecting decoder/renderer coverage
+    - python3 -m pytest --tb=no tests/codec_be_on_mr_nonselection tests/renderer --update_ref 1 -v --create_ref --html=report-dec.html --self-contained-html --junit-xml=report-junit-dec.xml -n auto --testcase_timeout $testcase_timeout --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $DUT_DECODER_PATH || exit_code_dec=$?
+    - lcov -c -d obj -o coverage_dec_rend.info # extract coverage of decoder/renderer
+
+    # second pass adds encoder coverage on top of the existing counters
+    - python3 -m pytest --tb=no tests/codec_be_on_mr_nonselection --encoder_only -v --html=report-enc.html --self-contained-html --junit-xml=report-junit-enc.xml -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $DUT_ENCODER_PATH || exit_code_enc=$?
+    - lcov -c -d obj -o coverage_enc_dec_rend.info # extract coverage of encoder/decoder/renderer
+
+    # remove apps and lib_util files from coverage
+    - lcov -r coverage_dec_rend.info "*apps*" -o coverage_dec_rend.info
+    - lcov -r coverage_dec_rend.info "*lib_util*" -o coverage_dec_rend.info
+    - lcov -r coverage_enc_dec_rend.info "*apps*" -o coverage_enc_dec_rend.info
+    - lcov -r coverage_enc_dec_rend.info "*lib_util*" -o coverage_enc_dec_rend.info
+
+    - commit_sha=$(git rev-parse HEAD)
+    - genhtml coverage_enc_dec_rend.info -o coverage_enc_dec_rend -t "Coverage on main enc/dec/rend @ $commit_sha"
+    - genhtml coverage_dec_rend.info -o coverage_dec_rend -t "Coverage on main -- dec/rend @ $commit_sha"
+  artifacts:
+    name: "main-coverage-sha-$CI_COMMIT_SHORT_SHA"
+    when: always
+    expire_in: 1 week
+    paths:
+      - coverage_enc_dec_rend.info
+      - coverage_dec_rend.info
+      - coverage_enc_dec_rend
+      - coverage_dec_rend
+      - report-dec.html
+      - report-enc.html
+    expose_as: "Coverage result"
+    reports:
+      junit:
+        - report-junit-dec.xml
+        - report-junit-enc.xml
+
+# ---------------------------------------------------------------
+# EVS 26.444 test job
+# ---------------------------------------------------------------
+
+# check bitexactness to EVS
+be-2-evs-26444:
+  extends:
+    - .test-job-linux
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "evs-26444"
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main"
+  tags:
+    - be-2-evs-basop
+  stage: test
+  timeout: "120 minutes" # To be revisited
+  script:
+    - *print-common-info
+    - *update-scripts-repo
+    # prepend "//" to "#define EVS_FLOAT", i.e. disable the define before building
+    - sed -i".bak" "s/\(#define EVS_FLOAT\)/\/\/\1/" lib_com/options.h
+    - make -j
+
+    # copy over to never change the testvector dir
+    - cp -r $EVS_BE_TEST_DIR_BASOP ./evs_be_test
+    - mkdir -p ./evs_be_test/output/decoded ./evs_be_test/output/bitstreams
+
+    - exit_code=0
+    - python3 -m pytest tests/test_26444.py -v --html=report.html --self-contained-html --junit-xml=report-junit.xml -n auto || exit_code=$?
+    - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_FAIL; fi
+    - exit 0
+
+  artifacts:
+    name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
+    expire_in: 1 week
+    when: always
+    paths:
+      - report-junit.xml
+      - report.html
+    expose_as: "EVS 26444 result"
+    reports:
+      junit:
+        - report-junit.xml
+
+# renderer-only test suite; runs only in manually triggered web pipelines
+ivas-pytest-renderer:
+  extends:
+    - .test-job-linux
+  rules:
+    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-renderer"
+  before_script:
+    - USE_LTV=0
+    - TEST_SUITE="tests/renderer"
+    - LEVEL_SCALING=1.0
+  <<: *ivas-pytest-anchor
+
+
+# ---------------------------------------------------------------
+# Various other tests
+# ---------------------------------------------------------------
+
+# TODO: actually run on MR once main problems are fixed
+voip-be-on-merge-request:
+  extends:
+    - .test-job-linux-needs-testv-dir
+  rules:
+    # - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main
+    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "voip-be-test"
+  stage: test
+  needs: ["build-codec-linux-make"]
+  timeout: "10 minutes"
+  script:
+    - *print-common-info
+    - make clean
+    - make -j
+    - python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py
+
+
+# ---------------------------------------------------------------
+# Complexity measurement jobs
+# ---------------------------------------------------------------
+
+.complexity-measurements-setup:
+  &complexity-measurements-setup # create necessary environment
+  ### 1. part: mainly same as in float repo - this is boilerplate code to make the gitlab pages presentation work
+  - mkdir -p wmops/logs
+
+  # fetch the artifacts of the previous run of this same job to keep history
+  - job_id=$(python3 ci/get_id_of_last_job_occurence.py $CI_COMMIT_REF_NAME $CI_JOB_NAME $CI_PROJECT_ID)
+  - echo $job_id
+  - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts.zip
+  - unzip artifacts.zip || true # this may fail on first run, when there are no artifacts there and the zip file is actually just "404"-html
+  - ls
+  - public_dir="$CI_JOB_NAME-public"
+  # if is needed to catch case when no artifact is there (first run), similarly as above
+  - if [[ -d $public_dir ]]; then mv $public_dir/* wmops/; fi
+  - ls wmops
+  - rm artifacts.zip
+  - rm -rf $public_dir
+
+  ### 2. part: setup specific for BASOP repo
+  # hack for using the reference encoder -> need to build manually to make script use ref enc and BASOP dec
+  - mkdir COMPLEXITY
+  - cp IVAS_cod_ref COMPLEXITY/IVAS_cod
+  # build branch code again with instrumentation
+  - make clean
+  - bash scripts/prepare_instrumentation.sh -p BASOP -m MEM_ONLY
+  - make -j -C $INSTR_DIR
+  - cp $INSTR_DIR/IVAS_dec COMPLEXITY/IVAS_dec
+
+.complexity-measurements-prepare-artifacts:
+  &complexity-measurements-prepare-artifacts # prepare artifacts -> move to public directory
+  - public_dir="$CI_JOB_NAME-public"
+  - mkdir $public_dir
+  - mv -f wmops/log_*_all.txt wmops/*.js ${public_dir}/
+  # move logfiles for links
+  - mkdir $public_dir/logs
+  # first move logs
+  - log_files=$(cat $public_dir/graphs*.js | grep logFile | sed "s/.*\(wmops_newsletter_.*\.csv\).*/\1/g")
+  - echo $log_files
+  - ls wmops/logs
+  - for f in $log_files; do [ -f wmops/logs/$f ] && mv wmops/logs/$f $public_dir/logs/$f; done
+  # copy index page blueprint
+  - cp ci/complexity_measurements/index_complexity.html ${public_dir}/index.html
+  # patch the format in the title
+  - sed -i "s/IVAS FORMAT/IVAS $in_format to $out_format/g" ${public_dir}/index.html
+  # do separately here to avoid overwrite complaints by mv
+  - mv -f ci/complexity_measurements/style.css ${public_dir}/
+  - ls $public_dir
+
+# shared template for all complexity-* jobs below
+.complexity-template:
+  extends:
+    - .test-job-linux
+  stage: test
+  variables:
+    # NOTE(review): CI variables reach the shell as strings; this acts as a
+    # default exit status for scripts that never assign ret_val themselves.
+    ret_val: 0
+  timeout: 3 hours 30 minutes
+  before_script:
+    - *print-common-info
+    - *update-scripts-repo
+    - *update-ltv-repo
+    - *build-reference-and-dut-binaries
+    - *complexity-measurements-setup
+    - which coan
+  artifacts:
+    name: "$CI_JOB_NAME--$CI_COMMIT_REF_NAME--sha-$CI_COMMIT_SHA"
+    when: always
+    expire_in: 2 week
+    paths:
+      - $CI_JOB_NAME-public
+
+# The delayed start times on the jobs below stagger them over the day.
+complexity-stereo-in-stereo-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+  script:
+    - in_format=stereo
+    - out_format=stereo
+    # NOTE(review): unlike the sibling jobs this one does not set ret_val=0
+    # in the script; it relies on the template's ret_val variable instead.
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" mem_only || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-ism-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 1 hour
+  script:
+    - in_format=ISM
+    - out_format=BINAURAL
+    - ret_val=0
+    # ISM runs measure all object counts, hence the explicit ISM+1..ISM+4 list
+    - bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-ism-in-binaural_room_ir-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 2 hours
+  script:
+    - in_format=ISM
+    - out_format=BINAURAL_ROOM_IR
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-ism-in-ext-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 3 hours 30 minutes
+  script:
+    - in_format=ISM
+    - out_format=EXT
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-sba-hoa3-in-hoa3-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 4 hours 30 minutes
+  script:
+    - in_format=HOA3
+    - out_format=HOA3
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-sba-hoa3-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 5 hours 30 minutes
+  script:
+    - in_format=HOA3
+    - out_format=BINAURAL
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-sba-hoa3-in-binaural_room_ir-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 6 hours 30 minutes
+  script:
+    - in_format=HOA3
+    - out_format=BINAURAL_ROOM_IR
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-mc-in-7_1_4-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 7 hours 30 minutes
+  script:
+    - in_format=MC
+    - out_format=7_1_4
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-mc-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 10 hours
+  script:
+    - in_format=MC
+    - out_format=BINAURAL
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-mc-in-binaural_room_ir-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 12 hours 30 minutes
+  script:
+    - in_format=MC
+    - out_format=BINAURAL_ROOM_IR
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-masa-in-ext-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 15 hours
+  script:
+    - in_format=MASA
+    - out_format=EXT
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-masa-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 16 hours
+  script:
+    - in_format=MASA
+    - out_format=BINAURAL
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-masa-in-hoa3-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 17 hours
+  script:
+    - in_format=MASA
+    - out_format=HOA3
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+# complexity-omasa-in-ext-out:
+#   extends:
+#     - .complexity-template
+#   rules:
+#     - if: $MEASURE_COMPLEXITY_LINUX
+#       when: delayed
+#       start_in: 13 hours
+#   script:
+#     - in_format=OMASA
+#     - out_format=EXT
+#     - ret_val=0
+#     - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+#     - *complexity-measurements-prepare-artifacts
+#     - exit $ret_val
+
+complexity-omasa-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 18 hours
+  script:
+    - in_format=OMASA
+    - out_format=BINAURAL
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-omasa-in-hoa3-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 20 hours
+  script:
+    - in_format=OMASA
+    - out_format=HOA3
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-StereoDmxEVS-stereo-in-mono-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 22 hours
+  script:
+    - in_format=StereoDmxEVS
+    - out_format=mono
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+# complexity-osba-in-ext-out:
+#   extends:
+#     - .complexity-template
+#   rules:
+#     - if: $MEASURE_COMPLEXITY_LINUX
+#       when: delayed
+#       start_in: 17 hours
+#   script:
+#     - in_format=OSBA
+#     - out_format=EXT
+#     - ret_val=0
+#     - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+#     - *complexity-measurements-prepare-artifacts
+#     - exit $ret_val
+
+complexity-osba-in-binaural-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 22 hours 30 minutes
+  script:
+    - in_format=OSBA
+    - out_format=BINAURAL
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+complexity-osba-in-binaural_room_ir-out:
+  extends:
+    - .complexity-template
+  rules:
+    - if: $MEASURE_COMPLEXITY_LINUX
+      when: delayed
+      start_in: 25 hours
+  script:
+    - in_format=OSBA
+    - out_format=BINAURAL_ROOM_IR
+    - ret_val=0
+    - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" || ret_val=$?
+    - *complexity-measurements-prepare-artifacts
+    - exit $ret_val
+
+# job that sets up gitlab pages website
+pages:
+  stage: deploy
+  tags:
+    - ivas-basop-linux
+  rules:
+    - if: $UPDATE_PAGES
+  script:
+    - *print-common-info
+    - *update-scripts-repo
+    - python3 ci/setup_pages.py
+    - ls
+    - ls -lh public
+  artifacts:
+    paths:
+      - public
+    expire_in: 1 day
--
GitLab