# NOTE(review): reconstructed from a copy-pasted diff-viewer page. The paste
# contained the same +130-line hunk twice plus viewer chrome ("Loading",
# line-number headers, "@@ -286,3 +286,133 @@"); the duplicate copy would
# create duplicate top-level job keys (invalid YAML, silently last-wins in
# most parsers) and has been removed along with the chrome.
#
# All *aliases below (e.g. *print-common-info, *mld-test-setup-codec) refer
# to anchors defined earlier in .gitlab-ci.yml (before line 286 of the real
# file) — not visible in this chunk; TODO confirm they exist upstream.

# MLD decoder-only test at +10 dB input level: the script passes
# --dut_encoder_path ./IVAS_cod_ref to pytest, so encoding uses the
# reference encoder and only the decoder under test is exercised.
ivas-pytest-mld-dec-lev+10:
  extends:
    - .test-job-linux
  stage: test
  needs: ["build-codec-linux-make"]
  timeout: "30 minutes"
  script:
    - *print-common-info
    - *update-scripts-repo
    - *update-ltv-repo
    - *copy-ltv-files-to-testv-dir
    - *remove-unsupported-testcases
    - LEVEL_SCALING=3.162 # +10 dB, 10^(10/20)
    - *apply-testv-scaling
    - *mld-test-setup-codec
    ### run pytest
    - exit_code=0
    - python3 -m pytest $TEST_SUITE -v --html=report.html --self-contained-html --junit-xml=report-junit.xml --mld --dut_encoder_path ./IVAS_cod_ref -n auto || exit_code=$?
    - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
    - python3 scripts/parse_mld.py report.html mld.csv
    # Run errors (junit "errors" attribute not zero) are a hard failure;
    # pytest exit 1 (test differences) maps to the non-bit-exact exit code,
    # which is tolerated via allow_failure below.
    - if [ $zero_errors != 1 ]; then echo "Run errors encountered!"; exit $EXIT_CODE_FAIL; fi
    - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_NON_BE; fi
    - exit 0
  allow_failure:
    exit_codes:
      - 123  # presumably the value of $EXIT_CODE_NON_BE — TODO confirm
  artifacts:
    name: "mld--sha-$CI_COMMIT_SHORT_SHA--stage-$CI_JOB_STAGE--results"
    expire_in: 1 week
    when: always
    paths:
      - report-junit.xml
      - report.html
      - mld.csv
    expose_as: "pytest mld results"
    reports:
      junit:
        - report-junit.xml

# MLD encoder+decoder test at -10 dB input level: no --dut_encoder_path,
# so both the encoder and decoder under test are exercised.
ivas-pytest-mld-enc-dec-lev-10:
  extends:
    - .test-job-linux
  stage: test
  needs: ["build-codec-linux-make"]
  timeout: "30 minutes"
  script:
    - *print-common-info
    - *update-scripts-repo
    - *update-ltv-repo
    - *copy-ltv-files-to-testv-dir
    - *remove-unsupported-testcases
    - LEVEL_SCALING=0.3162 # -10 dB, 10^(-10/20)
    - *apply-testv-scaling
    - *mld-test-setup-codec
    ### run pytest
    - exit_code=0
    - python3 -m pytest $TEST_SUITE -v --html=report.html --self-contained-html --junit-xml=report-junit.xml --mld -n auto || exit_code=$?
    - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
    - python3 scripts/parse_mld.py report.html mld.csv
    # Same failure mapping as the dec-lev jobs: run errors hard-fail,
    # MLD differences exit with the non-bit-exact code.
    - if [ $zero_errors != 1 ]; then echo "Run errors encountered!"; exit $EXIT_CODE_FAIL; fi
    - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_NON_BE; fi
    - exit 0
  allow_failure:
    exit_codes:
      - 123  # presumably the value of $EXIT_CODE_NON_BE — TODO confirm
  artifacts:
    name: "mld--sha-$CI_COMMIT_SHORT_SHA--stage-$CI_JOB_STAGE--results"
    expire_in: 1 week
    when: always
    paths:
      - report-junit.xml
      - report.html
      - mld.csv
    expose_as: "pytest mld results"
    reports:
      junit:
        - report-junit.xml

# MLD encoder+decoder test at +10 dB input level.
ivas-pytest-mld-enc-dec-lev+10:
  extends:
    - .test-job-linux
  stage: test
  needs: ["build-codec-linux-make"]
  timeout: "30 minutes"
  script:
    - *print-common-info
    - *update-scripts-repo
    - *update-ltv-repo
    - *copy-ltv-files-to-testv-dir
    - *remove-unsupported-testcases
    - LEVEL_SCALING=3.162 # +10 dB, 10^(10/20)
    - *apply-testv-scaling
    - *mld-test-setup-codec
    ### run pytest
    - exit_code=0
    - python3 -m pytest $TEST_SUITE -v --html=report.html --self-contained-html --junit-xml=report-junit.xml --mld -n auto || exit_code=$?
    - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
    - python3 scripts/parse_mld.py report.html mld.csv
    # Same failure mapping as the dec-lev jobs: run errors hard-fail,
    # MLD differences exit with the non-bit-exact code.
    - if [ $zero_errors != 1 ]; then echo "Run errors encountered!"; exit $EXIT_CODE_FAIL; fi
    - if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_NON_BE; fi
    - exit 0
  allow_failure:
    exit_codes:
      - 123  # presumably the value of $EXIT_CODE_NON_BE — TODO confirm
  artifacts:
    name: "mld--sha-$CI_COMMIT_SHORT_SHA--stage-$CI_JOB_STAGE--results"
    expire_in: 1 week
    when: always
    paths:
      - report-junit.xml
      - report.html
      - mld.csv
    expose_as: "pytest mld results"
    reports:
      junit:
        - report-junit.xml