variables:
TESTV_DIR: "/usr/local/testv"
LTV_DIR: "/usr/local/ltv"
EVS_BE_TEST_DIR_BASOP: "/usr/local/be_2_evs_basop"
FLOAT_REF_BRANCH: "ivas-float-update"
BUILD_OUTPUT: "build_output.txt"
SCRIPTS_DIR: "/usr/local/scripts"
EXIT_CODE_NON_BE: 123
EXIT_CODE_FAIL: 1
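# Note: EXIT_CODE_NON_BE matches the allow_failure exit_codes entry (123) of the
# compare jobs below, so tagged non-bitexact results show up as warnings rather
# than hard failures.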
LONG_TEST_SUITE: "tests/codec_be_on_mr_nonselection tests/renderer --param_file scripts/config/self_test_ltv.prm --use_ltv"
LONG_TEST_SUITE_NO_RENDERER: "tests/codec_be_on_mr_nonselection --param_file scripts/config/self_test_ltv.prm --use_ltv"
SHORT_TEST_SUITE: "tests/codec_be_on_mr_nonselection"
SHORT_TEST_SUITE_ENCODER: "tests/codec_be_on_mr_nonselection/test_param_file.py --param_file scripts/config/self_test_basop_encoder.prm"
LONG_TEST_SUITE_ENCODER: "tests/codec_be_on_mr_nonselection/test_param_file.py --param_file scripts/config/self_test_ltv_basop_encoder.prm"
TEST_SUITE: ""
DISABLE_HRTF_MODEL_TESTS: ""
# DISABLE_HRTF_MODEL_TESTS: "-k not model" # enable this declaration to disable the HRTF model-from-file tests
# These path variables are used by the pytest calls.
# They can be overwritten in the job templates to e.g. only test encoder or decoder in the chain
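# Illustration (mirroring the short enc/dec jobs further below): a decoder-only
# job pins the encoder side to the reference binary so only the decoder differs:
#   before_script:
#     - DUT_ENCODER_PATH=./IVAS_cod_ref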
DUT_ENCODER_PATH: "./IVAS_cod"
DUT_DECODER_PATH: "./IVAS_dec"
REF_ENCODER_PATH: "./IVAS_cod_ref"
REF_DECODER_PATH: "./IVAS_dec_ref"
MERGE_TARGET_ENCODER_PATH: "./IVAS_cod_merge_target"
MERGE_TARGET_DECODER_PATH: "./IVAS_dec_merge_target"
# These path variables are used for building the binaries
# They should never be overwritten!
REF_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_ref"
REF_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_ref"
MERGE_TARGET_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_merge_target"
MERGE_TARGET_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_merge_target"
LEVEL_SCALING: "1.0"
IVAS_PIPELINE_NAME: ''
BASOP_CI_BRANCH_PC_REPO: "basop-ci-branch-pc"
PRM_FILES: "scripts/config/self_test.prm scripts/config/self_test_ltv.prm"
TESTCASE_TIMEOUT_STV: 900
TESTCASE_TIMEOUT_LTV: 2400
TESTCASE_TIMEOUT_LTV_SANITIZERS: 6000
CI_REGRESSION_THRESH_MLD: "0.1"
CI_REGRESSION_THRESH_MAX_ABS_DIFF: "50"
CI_REGRESSION_THRESH_SSNR: "-1"
CI_REGRESSION_THRESH_ODG: "-0.05"
GIT_CLEAN_FLAGS: -ffdxq
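# For reference: git clean -ffdxq = force (incl. nested git repos), remove
# untracked directories and ignored files, quiet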
INSTR_DIR: "scripts/c-code_instrument"
TESTS_DIR_CODEC_BE_ON_MR: "tests/codec_be_on_mr_nonselection"
BUILD_WITH_DEBUG_MODE_INFO: ""
ENCODER_TEST: ""
DELTA_ODG: ""
COMPARE_DMX: ""
SPLIT_COMPARISON: ""
SKIP_REGRESSION_CHECK: ""
FAILED_TESTCASES_LIST: "failed-testcases.txt"
ERRORS_TESTCASES_LIST: "errors-testcases.txt"
PYTEST_CACHE_ARTIFACT: "pytest_cache.zip"
MEASURES_FOR_REPORT: "MLD MAX_ABS_DIFF MIN_SSNR MIN_ODG"
FLOAT_REF_COMMIT_FILE: "float-ref-git-sha.txt"
CUT_COMMIT_FILE: "CuT-git-sha.txt"
MERGE_TARGET_COMMIT_FILE: "merge-target-git-sha.txt"
MANUAL_PIPELINE_TYPE:
description: "Type for the manual pipeline run. Use 'pytest-compare' to run comparison test against reference float codec."
value: 'default'
options:
- 'default'
- 'pytest-compare'
- 'pytest-compare-long'
- 'pytest-compare-to-input'
- 'pytest-saturation-smoke-test'
- 'evs-26444'
- 'sanitizer'
- 'pytest-renderer'
- 'complexity'
- 'coverage'
- 'voip-be-test'
- 'peaq-enc-passthrough'
default:
interruptible: true # Make all jobs by default interruptible
workflow:
name: '$IVAS_PIPELINE_NAME'
rules:
# see https://docs.gitlab.com/ee/ci/yaml/workflow.html#switch-between-branch-pipelines-and-merge-request-pipelines
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push"
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
variables:
IVAS_PIPELINE_NAME: 'MR pipeline: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME'
### disabled for now because the push pipeline is redundant with the MR pipeline in the current workflow
# - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH # Pushes to main
# variables:
# IVAS_PIPELINE_NAME: 'Push pipeline: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'default' # for testing
variables:
IVAS_PIPELINE_NAME: 'Web run pipeline: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare'
variables:
IVAS_PIPELINE_NAME: 'Run comparison tools against float ref: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare-long'
variables:
IVAS_PIPELINE_NAME: 'Run comparison tools against float ref (long test vectors): $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-compare-to-input'
variables:
IVAS_PIPELINE_NAME: 'Run comparison tools against input (pass-through only): $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-saturation-smoke-test'
variables:
IVAS_PIPELINE_NAME: 'Run saturation smoke-test: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'evs-26444'
variables:
IVAS_PIPELINE_NAME: 'EVS 26.444 test: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'sanitizer'
variables:
IVAS_PIPELINE_NAME: 'Short testvectors sanitizers'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'pytest-renderer'
variables:
IVAS_PIPELINE_NAME: 'Renderer test: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'complexity'
variables:
IVAS_PIPELINE_NAME: 'Complexity Measurement on $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'voip-be-test'
variables:
IVAS_PIPELINE_NAME: 'Voip BE test on $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == 'peaq-enc-passthrough'
variables:
IVAS_PIPELINE_NAME: 'PEAQ encoder pass-through test: $CI_COMMIT_BRANCH'
- if: $CI_PIPELINE_SOURCE == 'schedule' # Scheduled in any branch
variables:
IVAS_PIPELINE_NAME: 'Scheduled pipeline: $CI_COMMIT_BRANCH'
stages:
- .pre
- prevalidate
- build
- check-be
- test
- compare
- postvalidate
- deploy
# ---------------------------------------------------------------
# Generic script anchors
# ---------------------------------------------------------------
# These can be used later on to do common tasks
# Prints useful information for every job and should be used at the beginning of each job
.print-common-info: &print-common-info
- |
echo "Printing common information for build job."
echo "Current job is run on commit $CI_COMMIT_SHA"
echo "Commit time was $CI_COMMIT_TIMESTAMP"
date | xargs echo "System time is"
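# Example usage (as done by the test jobs below): reference the anchor as a
# YAML alias at the start of a job's script:
#   script:
#     - *print-common-info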
.print-common-info-windows: &print-common-info-windows
- |
echo "Printing common information for build job."
echo "Current job is run on commit $CI_COMMIT_SHA"
echo "Commit time was $CI_COMMIT_TIMESTAMP"
("echo 'System time is'", "Get-Date -Format 'dddd dd/MM/yyyy HH:mm K'") | Invoke-Expression
# From float CI
.mr-fetch-target-branch: &mr-fetch-target-branch
# first delete local target branch to avoid conflicts when branch is cached and there are merge conflicts during fetching
# depending on caching, the branch may not be there, so prevent this command from failing -> should maybe be done smarter later
- git branch -D $CI_MERGE_REQUEST_TARGET_BRANCH_NAME || true
# needed when depth is lower than the number of commits in the branch
- git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
# From float CI
.mr-get-target-commit: &mr-get-target-commit # compare to last target branch commit before pipeline was created
- target_commit=$(git log $CI_MERGE_REQUEST_TARGET_BRANCH_NAME -1 --oneline --before=${CI_PIPELINE_CREATED_AT} --format=%H)
# From float CI
.merge-request-comparison-setup-codec: &merge-request-comparison-setup-codec
### build test binaries, initial clean for paranoia reasons
- make clean
- make -j
- mv IVAS_cod IVAS_cod_test
- mv IVAS_dec IVAS_dec_test
- mv IVAS_rend IVAS_rend_test
- git restore .
### store the current commit hash
- source_branch_commit_sha=$(git rev-parse HEAD)
### checkout version to compare against
- *mr-fetch-target-branch
- *mr-get-target-commit
- git checkout $target_commit
- echo "Building reference codec at commit $target_commit"
### build reference binaries
- make clean
- make -j
- mv IVAS_cod IVAS_cod_ref
- mv IVAS_dec IVAS_dec_ref
- mv IVAS_rend IVAS_rend_ref
- git restore .
# rename test binaries back
- mv IVAS_cod_test IVAS_cod
- mv IVAS_dec_test IVAS_dec
- mv IVAS_rend_test IVAS_rend
.merge-request-comparison-check: &merge-request-comparison-check
- echo "--------------- Running merge-request-comparison-check anchor ---------------"
- if [ $zero_errors != 1 ]; then echo "Run errors encountered!"; exit $EXIT_CODE_FAIL; fi
- if [ $exit_code -ne 0 ] && [ $non_be_flag == 0 ]; then echo "Non-bitexact cases without non-BE tag encountered!"; exit $EXIT_CODE_FAIL; fi
- if [ $exit_code -ne 0 ] && [ $non_be_flag != 0 ]; then echo "Non-bitexact cases with non-BE tag encountered"; exit $EXIT_CODE_NON_BE; fi
- exit 0
.activate-debug-mode-info-if-set: &activate-debug-mode-info-if-set
- if [ "$BUILD_WITH_DEBUG_MODE_INFO" = "true" ]; then
- sed -i.bak -e "s/\/\*\ *\(#define\ *DEBUGGING\ *\)\*\//\1/g" lib_com/options.h
- sed -i.bak -e "s/\/\*\ *\(#define\ *DEBUG_MODE_INFO\ *\)\*\//\1/g" lib_com/options.h
- fi
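# The sed calls above uncomment the compile-time switches in lib_com/options.h,
# e.g. "/* #define DEBUGGING */" becomes "#define DEBUGGING".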
.build-float-ref-binaries: &build-float-ref-binaries
- git rev-parse HEAD > $CUT_COMMIT_FILE
- current_commit_sha=$(git rev-parse HEAD)
### build reference binaries
- git checkout $FLOAT_REF_BRANCH
- git pull origin $FLOAT_REF_BRANCH
- *activate-debug-mode-info-if-set
- make clean
- make -j >> /dev/null
- mv ./IVAS_cod ./$REF_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY
- mv ./IVAS_dec ./$REF_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY
- mv ./IVAS_rend ./IVAS_rend_ref
### Return to current branch
- git restore .
- git rev-parse HEAD > $FLOAT_REF_COMMIT_FILE
- git checkout $current_commit_sha
.build-merge-target-binaries: &build-merge-target-binaries
- current_commit_sha=$(git rev-parse HEAD)
### build merge target binaries
- git checkout $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- git pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- *activate-debug-mode-info-if-set
- make clean
- make -j >> /dev/null
- mv ./IVAS_cod ./$MERGE_TARGET_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY
- mv ./IVAS_dec ./$MERGE_TARGET_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY
- mv ./IVAS_rend ./IVAS_rend_merge_target
### Return to current branch
- git restore .
- git rev-parse HEAD > $MERGE_TARGET_COMMIT_FILE
- git checkout $current_commit_sha
.build-float-ref-and-dut-binaries: &build-float-ref-and-dut-binaries
### build reference binaries
- *build-float-ref-binaries
### build dut binaries
- *activate-debug-mode-info-if-set
- make clean
- make -j >> /dev/null
.build-and-create-float-ref-outputs: &build-and-create-float-ref-outputs
- *build-float-ref-and-dut-binaries
### prepare pytest
# create short test vectors
- python3 tests/create_short_testvectors.py
# create references
- exit_code=0
- enc_stats_arg=""
- if [ "$ENCODER_TEST" = "true" ]; then enc_stats_arg="--enc_stats"; fi
- python3 -m pytest $TEST_SUITE -v --update_ref 1 $enc_stats_arg --create_ref -n auto --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH || exit_code=$?
.update-scripts-repo: &update-scripts-repo
- cd $SCRIPTS_DIR
- sed -i '/fetch/d' .git/config # Remove all fetch lines to clean out dead links
- git remote set-branches --add origin $BASOP_CI_BRANCH_PC_REPO # Add currently used branch
- git fetch
- git restore --staged . # Needed if HRTF model files were updated.
- git restore . # Just as a precaution
- git checkout $BASOP_CI_BRANCH_PC_REPO
- git pull origin $BASOP_CI_BRANCH_PC_REPO
- cd -
- cp -r $SCRIPTS_DIR/ci .
- cp -r $SCRIPTS_DIR/scripts .
- cp -r $SCRIPTS_DIR/tests .
- cp $SCRIPTS_DIR/pytest.ini .
.apply-testv-scaling: &apply-testv-scaling
- echo "Applying level scaling in scripts/testv using scale=$LEVEL_SCALING"
- tests/scale_pcm.py ./scripts/testv/ $LEVEL_SCALING
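# LEVEL_SCALING is a linear amplitude factor: 3.162 ~ +10 dB, 0.3162 ~ -10 dB
# (see the enc/dec +-10db jobs below).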
.update-ltv-repo: &update-ltv-repo
- cd $LTV_DIR
- git pull
- cd -
.get-commits-behind-count: &get-commits-behind-count
- echo $CI_COMMIT_SHA
- echo $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- commits_behind_count=$(git rev-list --count $CI_COMMIT_SHA..origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME)
.check-commits-behind-count-in-compare-jobs: &check-commits-behind-count-in-compare-jobs
- |
if [ $commits_behind_count -ne 0 ]; then
echo "Your branch is not up-to-date with main -> Compare tests will not run as they can contain false negatives this way."
echo "Main might have changed during your pipeline run. Run 'git pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME' to update."
exit 1
fi
.copy-ltv-files-to-testv-dir: &copy-ltv-files-to-testv-dir
- cp "$LTV_DIR"/*.wav scripts/testv/
- cp "$LTV_DIR"/*.met scripts/testv/
- cp "$LTV_DIR"/*.csv scripts/testv/
.activate-Werror-linux: &activate-Werror-linux
- sed -i.bak "s/^# \(CFLAGS += -Werror\)/\1/" Makefile
- sed -i.bak "s/# \(set(CMAKE_C_FLAGS \"\${CMAKE_C_FLAGS} -Werror\")\)/\1/" CMakeLists.txt
.activate-WX-windows: &activate-WX-windows
- (Get-Content -Path "CMakeLists.txt") -replace '# \(add_compile_options\("\/WX"\)\)', '$1' | Set-Content -Path "CMakeLists.txt"
- Get-ChildItem -Path "Workspace_msvc" -Filter "*.vcxproj" | ForEach-Object { (Get-Content -Path $_.FullName) -replace '<TreatWarningAsError>false', '<TreatWarningAsError>true' | Set-Content -Path $_.FullName }
.remove_unsupported_testcases: &remove_unsupported_testcases
# Note: the --use-main-pc-set arg should only be used on main-pc and float-pc branches
- python3 ci/remove_unsupported_testcases.py $PRM_FILES --use-main-pc-set
# ---------------------------------------------------------------
# Job templates
# ---------------------------------------------------------------
# When designing templates, try not to use too much inheritance; when a job
# extends multiple templates, remember that on conflict the template listed
# last overwrites the parameter.
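# Illustration of that precedence rule (hypothetical template/job names):
#   my-job:
#     extends:
#       - .template-a   # sets timeout: "10 minutes"
#       - .template-b   # sets timeout: "20 minutes"
#   -> my-job ends up with timeout: "20 minutes"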
# templates for rules
.rules-basis:
rules:
- if: $MIRROR_ACCESS_TOKEN # Don't run in the mirror update pipeline (only then MIRROR_ACCESS_TOKEN is defined)
when: never
- if: $CI_PIPELINE_SOURCE == 'schedule' # Don't run in any scheduled pipelines by default (use schedule templates below to enable again for certain conditions)
when: never
- if: $CI_PIPELINE_SOURCE == 'trigger' # Don't run triggered pipeline by default
when: never
- if: $MANUAL_PIPELINE_TYPE == 'test-be-release' # Skip all the normal jobs when testing manually against release codec
when: never
- if: $MANUAL_PIPELINE_TYPE == 'test-long-self-test' # Skip all the normal jobs when testing manually against release codec
when: never
- if: $MANUAL_PIPELINE_TYPE == 'ivas-conformance'
when: never
- if: $MANUAL_PIPELINE_TYPE == 'ivas-conformance-linux'
when: never
- if: $MANUAL_PIPELINE_TYPE == 'check-clipping'
when: never
- if: $MANUAL_PIPELINE_TYPE == 'test-branch-vs-input-passthrough'
when: never
- when: on_success
.rules-merge-request:
extends: .rules-basis
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-pytest-to-ref-short:
rules:
- if: $PYTEST_MLD_SHORT # Set by scheduled pipeline
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare"
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-pytest-to-input-short:
rules:
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare-to-input"
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'schedule'
when: never
.rules-pytest-to-main-short:
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-mr-to-main-or-main-pc:
rules:
- if: $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main-pc")
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-mr-to-main-or-main-pc-or-manual:
rules:
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare"
- if: $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main-pc")
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-pytest-long:
rules:
- if: $PYTEST_MLD_LONG # Set by scheduled pipeline
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-compare-long"
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: never
.rules-coverage:
rules:
- if: $COVERAGE_TEST # Set by scheduled pipeline
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "coverage"
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: never
.rules-pytest-saturation-smoke-test:
rules:
- if: $PYTEST_SMOKE_TEST # Set by scheduled pipeline
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-saturation-smoke-test"
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: never
.rules-merge-request-no-draft:
extends: .rules-basis
rules:
- if: $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
.rules-merge-request-to-main-pc:
extends: .rules-basis
rules:
- if: $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main-pc'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
# templates to define stages and platforms
.test-job-linux:
tags:
- ivas-linux
.build-job-linux:
stage: build
timeout: "2 minutes"
tags:
- ivas-linux
.build-job-windows:
stage: build
timeout: "4 minutes"
tags:
- ivas-windows
.print-results-banner: &print-results-banner
- set +x
- echo ""
- echo -e "==================================================================================================================\n================================================== TEST RESULTS ==================================================\n==================================================================================================================\n"
# template for test jobs on linux that need the TESTV_DIR
.test-job-linux-needs-testv-dir:
extends: .test-job-linux
before_script:
- *update-scripts-repo
- if [ ! -d "$TESTV_DIR" ]; then mkdir -p $TESTV_DIR; fi
- cp -r scripts/testv/* $TESTV_DIR/
.ivas-pytest-anchor: &ivas-pytest-anchor
stage: test
needs: ["build-codec-linux-make"]
timeout: "360 minutes"
variables:
# keep "mld" in artifact name for backwards compatibility reasons
CSV_ARTIFACT_NAME: "mld--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv"
CSV_ARTIFACT_SPLIT: "mld--split--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv"
MERGED_CSV_ARTIFACT_NAME: "$CI_JOB_NAME--merged_csv--$CI_JOB_ID.csv"
PAGES_HTML_ARTIFACT_NAME: "$CI_JOB_NAME-index.html"
SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html"
SUMMARY_HTML_ARTIFACT_SPLIT: "summary_split_$CI_JOB_NAME.html"
IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME"
IMAGES_ARTIFACT_SPLIT: "images_split_$CI_JOB_NAME"
script:
- *print-common-info
- *update-scripts-repo
- if [ $USE_LTV -eq 1 ]; then
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- testcase_timeout=$TESTCASE_TIMEOUT_LTV
- else
- testcase_timeout=$TESTCASE_TIMEOUT_STV
- fi
- *remove_unsupported_testcases
- if [ $LEVEL_SCALING != "1.0" ];then
- *apply-testv-scaling
- fi
- if [ "$COMPARE_DMX" = "true" ] || [ "$ENCODER_TEST" = "true" ]; then
- BUILD_WITH_DEBUG_MODE_INFO="true"
- fi
- INV_LEVEL_SCALING=$(awk "BEGIN {print 1.0 / $LEVEL_SCALING}")
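# e.g. LEVEL_SCALING=3.162 (+10 dB) gives INV_LEVEL_SCALING ~= 0.3162; passed
# as --scalefac below, presumably to undo the input scaling before scoring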
- comp_args="--mld --ssnr --odg --scalefac $INV_LEVEL_SCALING"
- summary_args="MLD DIFF SSNR ODG"
- REPORT_ARG=""
- if [ "$ENCODER_TEST" = "true" ]; then comp_args="${comp_args} --enc_stats"; fi
- if [ "$DELTA_ODG" = "true" ]; then comp_args="${comp_args} --odg_bin"; MEASURES_FOR_REPORT="$MEASURES_FOR_REPORT DELTA_ODG"; fi
- if [ "$SPLIT_COMPARISON" = "true" ]; then comp_args="${comp_args} --split-comparison"; fi
- *build-and-create-float-ref-outputs
# DMX comparison only in manual job with no other metrics
- if [ "$COMPARE_DMX" = "true" ]; then
- comp_args="--compare_enc_dmx"
- fi
- echo "$comp_args"
### run pytest
- exit_code=0
- python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report.html --self-contained-html --junit-xml=report-junit.xml $comp_args -n auto --testcase_timeout $testcase_timeout --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH || exit_code=$?
- zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
### create histograms
- if [ "$SPLIT_COMPARISON" = "true" ]; then
- python3 scripts/parse_xml_report.py report-junit.xml $CSV_ARTIFACT_NAME --split-csv-file $CSV_ARTIFACT_SPLIT
- else
- python3 scripts/parse_xml_report.py report-junit.xml $CSV_ARTIFACT_NAME
- fi
# first for "whole" files comparison
- python3 scripts/create_histograms.py $CSV_ARTIFACT_NAME $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT --write-out-histograms
- python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT
- if [ "$SPLIT_COMPARISON" = "true" ]; then
- python3 scripts/create_histograms.py $CSV_ARTIFACT_SPLIT $IMAGES_ARTIFACT_SPLIT --measures $MEASURES_FOR_REPORT --write-out-histograms
- python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_SPLIT $CI_JOB_ID $CI_JOB_NAME $IMAGES_ARTIFACT_SPLIT --measures $MEASURES_FOR_REPORT
- else
# touch files to suppress warning for missing artifacts
- touch $CSV_ARTIFACT_SPLIT $IMAGES_ARTIFACT_SPLIT
- fi
- if [ $USE_LTV -eq 1 ] && [ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]; then
- id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH $CI_JOB_NAME $CI_PROJECT_ID)
- echo "Job ID from variables - $CI_JOB_ID, Job ID from script - $id_previous"
- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip
- unzip artifacts.zip -d previous_artifacts
# This wildcard relies on exactly one csv file being present per job
- file_previous="previous_artifacts/mld--$CI_JOB_NAME-$id_previous--sha-*.csv"
- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME
- else
# create empty file for artifacts to avoid errors
- touch $PAGES_HTML_ARTIFACT_NAME
- touch $MERGED_CSV_ARTIFACT_NAME
- fi
- if [ $zero_errors != 1 ]; then
- echo "Run errors encountered!"
# TODO: temporary only to not fail MR pipelines on crashes
- if [ $CI_PIPELINE_SOURCE == 'merge_request_event' ]; then
- exit $EXIT_CODE_NON_BE
- else
- exit $EXIT_CODE_FAIL
- fi
- fi
- if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_NON_BE; fi
- exit 0
allow_failure:
exit_codes:
- 123
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- report-junit.xml
- report.html
- $PAGES_HTML_ARTIFACT_NAME
- $CSV_ARTIFACT_NAME
- $CSV_ARTIFACT_SPLIT
- $MERGED_CSV_ARTIFACT_NAME
- $SUMMARY_HTML_ARTIFACT_NAME
- $SUMMARY_HTML_ARTIFACT_SPLIT
- $IMAGES_ARTIFACT_NAME
- $IMAGES_ARTIFACT_SPLIT
expose_as: "pytest compare results"
reports:
junit:
- report-junit.xml
.check-up-to-date-in-comparison-jobs: &check-up-to-date-in-comparison-jobs
- *get-commits-behind-count
- |
if [ $commits_behind_count -ne 0 ]; then
set +x
echo -e "Your branch is $commits_behind_count commits behind the target branch, possibly main changed during your pipeline run. Checking bitexactness or testing for regressions now can result in meaningless results. Run\n\t git pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME\nto update."
exit 1
fi
.check-be-to-target-anchor: &check-be-to-target-anchor
stage: check-be
needs: ["build-codec-linux-make"]
timeout: "300 minutes"
variables:
XML_REPORT: "report--$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.xml"
HTML_REPORT: "report--$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.html"
PYTEST_LOG_TARGET_BRANCH: "pytest-log-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME.txt"
script:
- *print-common-info
- set -euxo pipefail
- *update-scripts-repo
- python3 tests/create_short_testvectors.py
- if [ $USE_LTV -eq 1 ]; then
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- testcase_timeout=$TESTCASE_TIMEOUT_LTV
- else
- testcase_timeout=$TESTCASE_TIMEOUT_STV
- fi
- *remove_unsupported_testcases
- python3 scripts/prepare_combined_format_inputs.py
- if [ $LEVEL_SCALING != "1.0" ];then
- *apply-testv-scaling
- fi
- *build-float-ref-binaries
- *build-merge-target-binaries
- make clean
- make -j >> /dev/null
- *check-up-to-date-in-comparison-jobs
- exit_code_target=0
- python3 -m pytest $TEST_SUITE -v --update_ref 1 --create_ref -n auto --ref_encoder_path $MERGE_TARGET_ENCODER_PATH --ref_decoder_path $MERGE_TARGET_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH $DISABLE_HRTF_MODEL_TESTS > $PYTEST_LOG_TARGET_BRANCH || exit_code_target=$?
- exit_code=0
- rm -rf .pytest_cache || true
- python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT --self-contained-html --junit-xml=$XML_REPORT --ref_encoder_path $MERGE_TARGET_ENCODER_PATH --ref_decoder_path $MERGE_TARGET_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout $DISABLE_HRTF_MODEL_TESTS > pytest_log.txt || exit_code=$?
- if [ $exit_code -ne 0 ]; then
- exit_code=$EXIT_CODE_NON_BE
- zip -r $PYTEST_CACHE_ARTIFACT .pytest_cache
- grep "^FAILED" pytest_log.txt | sed "s/^FAILED /'/" | sed "s/] - .*/]'/" | tr "\n" " " > $FAILED_TESTCASES_LIST || true
- grep "^FAILED" pytest_log.txt | sed "s/^FAILED //" | sed "s/] - .*/]/" > failed_testcases_for_printing.txt || true
- num_failures=$(wc -l < failed_testcases_for_printing.txt)
- grep "^ERROR" pytest_log.txt | sed "s/^ERROR /'/" | sed "s/] - .*/]'/" | tr "\n" " " > $ERRORS_TESTCASES_LIST || true
- grep "^ERROR" pytest_log.txt | sed "s/^ERROR //" | sed "s/] - .*/]/" > errors_testcases_for_printing.txt || true
- num_errors=$(wc -l < errors_testcases_for_printing.txt)
- *print-results-banner
- echo "Found these $num_failures non-bitexact testcases:"
- cat failed_testcases_for_printing.txt
- echo "Reproduce locally with:"
- echo -e "1. Create references with target branch $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:\n\t- git checkout $(cat $FLOAT_REF_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- mv IVAS_cod IVAS_cod_ref\n\t- mv IVAS_dec IVAS_dec_ref\n\t- python3 -m pytest $(cat $FAILED_TESTCASES_LIST) --update_ref 1 --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH"
- echo -e "2. Run test with source branch $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:\n\t- git checkout $(cat $CUT_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- python3 -m pytest $(cat $FAILED_TESTCASES_LIST) --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH"
- echo "The individual command lines can be found in the html report in the job artifacts."
- if [ $num_errors -ne 0 ]; then
- exit_code=1
- echo "There were errors present in the following testcases:"
- cat errors_testcases_for_printing.txt
- fi
- exit $exit_code
- else
# create empty files to not have errors at artifact stage
- touch $FAILED_TESTCASES_LIST
- touch $ERRORS_TESTCASES_LIST
- touch $PYTEST_CACHE_ARTIFACT
- *print-results-banner
- echo "All testcases are bitexact."
- fi
- exit $exit_code
allow_failure:
exit_codes:
- 123
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- $XML_REPORT
- $HTML_REPORT
- $FAILED_TESTCASES_LIST
- $ERRORS_TESTCASES_LIST
- pytest_log.txt
- $PYTEST_CACHE_ARTIFACT
- $FLOAT_REF_COMMIT_FILE
- $CUT_COMMIT_FILE
- $MERGE_TARGET_COMMIT_FILE
- $PYTEST_LOG_TARGET_BRANCH
expose_as: "pytest compare results"
reports:
junit:
- $XML_REPORT
.overwrite-pytest-cache-with-artifact: &overwrite-pytest-cache-with-artifact
- if [ -f $PYTEST_CACHE_ARTIFACT ]; then
- rm -rf .pytest_cache || true
- unzip $PYTEST_CACHE_ARTIFACT
- fi
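# Restoring this cache enables pytest --last-failed (set below) to rerun only
# the testcases that were non-BE in the preceding check-be job.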
.check-regressions-pytest-anchor: &check-regressions-pytest-anchor
stage: test
timeout: "300 minutes"
variables:
XML_REPORT_BRANCH: "report-junit-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.xml"
XML_REPORT_MAIN: "report-junit-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.xml"
HTML_REPORT_BRANCH: "report-junit-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.html"
HTML_REPORT_MAIN: "report-junit-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.html"
CSV_BRANCH: "scores-branch-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.csv"
CSV_MAIN: "scores-main-$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA.csv"
IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME"
SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html"
script:
- *print-common-info
# create empty files for all artifacts to suppress warnings in case of no regressions found or all is BE
- touch $XML_REPORT_BRANCH $XML_REPORT_MAIN $HTML_REPORT_BRANCH $HTML_REPORT_MAIN $CSV_BRANCH $CSV_MAIN $SUMMARY_HTML_ARTIFACT_NAME $FLOAT_REF_COMMIT_FILE $CUT_COMMIT_FILE $MERGE_TARGET_COMMIT_FILE regressions_crashes.csv regressions_MLD.csv regressions_MAX_ABS_DIFF.csv regressions_MIN_SSNR.csv regressions_MIN_ODG.csv improvements_crashes.csv improvements_MLD.csv improvements_MAX_ABS_DIFF.csv improvements_MIN_SSNR.csv improvements_MIN_ODG.csv
- mkdir $IMAGES_ARTIFACT_NAME
- set -euxo pipefail
- if [ -s $FAILED_TESTCASES_LIST ]; then
- *overwrite-pytest-cache-with-artifact
- export PYTEST_ADDOPTS=--last-failed
- else
# turn off echoing back of commands for result printout
- *print-results-banner
- echo -e "All tested cases were bit-exact between $CI_MERGE_REQUEST_TARGET_BRANCH_NAME and $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME.\nNo need to check for regressions. All is fine."
- exit 0
- fi
- *update-scripts-repo
- if [ $USE_LTV -eq 1 ]; then
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- testcase_timeout=$TESTCASE_TIMEOUT_LTV
- else
- testcase_timeout=$TESTCASE_TIMEOUT_STV
- fi
- *remove_unsupported_testcases
- if [ $LEVEL_SCALING != "1.0" ];then
- *apply-testv-scaling
- fi
# check MR title for flag that allows regressions to be mergable
- echo $CI_MERGE_REQUEST_TITLE > tmp.txt
- allow_regressions_flag=$(grep -c --ignore-case "\[allow[ -]*regression\]" tmp.txt) || true
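# e.g. a (hypothetical) MR title "Optimize foo [allow regression]" sets the flag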
- INV_LEVEL_SCALING=$(awk "BEGIN {print 1.0 / $LEVEL_SCALING}")
- comp_args="--mld --ssnr --odg --scalefac $INV_LEVEL_SCALING"
### run branch first
# this per default builds the branch and the reference and creates the reference outputs
- *build-and-create-float-ref-outputs
- *check-up-to-date-in-comparison-jobs
# need to restore cache again
- *overwrite-pytest-cache-with-artifact
- python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_BRANCH --self-contained-html --junit-xml=$XML_REPORT_BRANCH $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true
- zero_errors_branch=$(cat $XML_REPORT_BRANCH | grep -c 'errors="0"') || true
- python3 scripts/parse_xml_report.py $XML_REPORT_BRANCH $CSV_BRANCH
# Store branch outputs for comparison
- mv tests/dut tests/dut_branch
# create the summary based on the branch only
- python3 scripts/create_histograms.py $CSV_BRANCH $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT
- python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT
### run main now
- git checkout $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- git pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
- make clean
- make -j >> /dev/null
# need to restore cache again
- *overwrite-pytest-cache-with-artifact
- python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_MAIN --self-contained-html --junit-xml=$XML_REPORT_MAIN $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true
- python3 scripts/parse_xml_report.py $XML_REPORT_MAIN $CSV_MAIN
### compare the two csv files for regressions
- regressions_found=0
- python3 scripts/basop_check_for_changes_in_testcases.py --show_improvements --xml_report $XML_REPORT_BRANCH $CSV_BRANCH $CSV_MAIN > regression_log.txt || regressions_found=$?
- exit_code=0
- *print-results-banner
- if [ $zero_errors_branch != 1 ]; then
- echo "Run errors encountered!"
- exit_code=$EXIT_CODE_FAIL
- echo "Reproduce locally with:"
- echo -e "1. Create references with target branch $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:\n\t- git checkout $(cat $FLOAT_REF_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- mv IVAS_cod IVAS_cod_ref\n\t- mv IVAS_dec IVAS_dec_ref\n\t- python3 -m pytest $(cat $ERRORS_TESTCASES_LIST) --update_ref 1 --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH"
- echo -e "2. Run test with source branch $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:\n\t- git checkout $(cat $CUT_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- python3 -m pytest $(cat $ERRORS_TESTCASES_LIST) --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH"
- echo "The individual command lines can be found in the regressions_crashes.csv files in the job artifacts."
- elif [ $regressions_found != 0 ] && [ "$SKIP_REGRESSION_CHECK" != "true" ]; then
- cat regression_log.txt
- if [ $allow_regressions_flag == 0 ]; then
- echo "Detected regression wrt to $CI_MERGE_REQUEST_TARGET_BRANCH_NAME, [allow regression] not set!"
- exit_code=$EXIT_CODE_FAIL;
- else
- echo "Detected regression wrt to $CI_MERGE_REQUEST_TARGET_BRANCH_NAME, [allow regression] set."
- exit_code=$EXIT_CODE_NON_BE;
- fi
- echo "Reproduce locally with:"
- echo -e "1. Create references with target branch $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:\n\t- git checkout $(cat $FLOAT_REF_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- mv IVAS_cod IVAS_cod_ref\n\t- mv IVAS_dec IVAS_dec_ref\n\t- python3 -m pytest $(cat $FAILED_TESTCASES_LIST) --update_ref 1 --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH"
- echo -e "2. Run test with source branch $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:\n\t- git checkout $(cat $CUT_COMMIT_FILE)\n\t- make clean\n\t- make -j\n\t- python3 -m pytest $(cat $FAILED_TESTCASES_LIST) --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH"
- echo "The individual command lines can be found in the regressions_*.csv files in the job artifacts."
- fi
- exit $exit_code
after_script:
- rm -rf tests/dut tests/ref
allow_failure:
exit_codes:
- 123
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- $XML_REPORT_BRANCH
- $XML_REPORT_MAIN
- $HTML_REPORT_BRANCH
- $HTML_REPORT_MAIN
- $CSV_BRANCH
- $CSV_MAIN
- $SUMMARY_HTML_ARTIFACT_NAME
- $IMAGES_ARTIFACT_NAME
- $FLOAT_REF_COMMIT_FILE
- $CUT_COMMIT_FILE
- $MERGE_TARGET_COMMIT_FILE
- regressions_crashes.csv
- regressions_MLD.csv
- regressions_MAX_ABS_DIFF.csv
- regressions_MIN_SSNR.csv
- regressions_MIN_ODG.csv
- improvements_crashes.csv
- improvements_MLD.csv
- improvements_MAX_ABS_DIFF.csv
- improvements_MIN_SSNR.csv
- improvements_MIN_ODG.csv
expose_as: "pytest compare results"
reports:
junit:
- $XML_REPORT_BRANCH
- $XML_REPORT_MAIN
.ivas-pytest-sanitizers-anchor: &ivas-pytest-sanitizers-anchor
stage: test
needs: ["build-codec-linux-make"]
timeout: "300 minutes"
rules:
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: never
- if: $CI_PIPELINE_SOURCE == 'schedule' && $IVAS_PYTEST_MSAN
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "sanitizer"
script:
- *print-common-info
- *update-scripts-repo
- *copy-ltv-files-to-testv-dir
- *remove_unsupported_testcases
- *build-float-ref-binaries
- set -euxo pipefail
- make_args="CLANG=$CLANG_NUM"
- if [[ $CLANG_NUM == 3 ]]; then
- export UBSAN_OPTIONS="suppressions=scripts/ubsan_basop.supp,report_error_type=1,print_stacktrace=1"
- python3 scripts/basop_create_ignorelist_for_ubsan.py
- make_args="$make_args IGNORELIST=1"
- fi
- make clean
- make -j $make_args >> /dev/null
- testcase_timeout_arg="--testcase_timeout $TESTCASE_TIMEOUT_LTV_SANITIZERS"
# disable per-testcase timeout for msan to investigate why it takes so long
- if [[ $CLANG_NUM = 1 ]]; then
- testcase_timeout_arg=""
- fi
- python3 -m pytest $TEST_SUITE -v --tb=no --update_ref 1 --html=report.html --self-contained-html --junit-xml=report-junit.xml $testcase_timeout_arg --ref_encoder_path $DUT_ENCODER_PATH --ref_decoder_path $DUT_DECODER_PATH
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
when: always
expire_in: "2 weeks"
paths:
- report-junit.xml
- report.html
reports:
junit:
- report-junit.xml
.ivas-pytest-compare-to-input-anchor: &ivas-pytest-compare-to-input-anchor
stage: test
needs: ["build-codec-linux-make"]
timeout: "360 minutes"
variables:
SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html"
IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME"
script:
- *print-common-info
- *update-scripts-repo
- if [ $USE_LTV -eq 1 ]; then
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- testcase_timeout=$TESTCASE_TIMEOUT_LTV
- else
- testcase_timeout=$TESTCASE_TIMEOUT_STV
- fi
- *remove_unsupported_testcases
- if [ $LEVEL_SCALING != "1.0" ];then
- *apply-testv-scaling
- fi
- *build-float-ref-and-dut-binaries
- INV_LEVEL_SCALING=$(awk "BEGIN {print 1.0 / $LEVEL_SCALING}")
- comp_args="--mld --ssnr --odg --scalefac $INV_LEVEL_SCALING"
### run pytest
- exit_code=0
- python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report-ref.html --self-contained-html --junit-xml=report-junit-ref.xml $comp_args -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $REF_ENCODER_PATH --dut_decoder_path $REF_DECODER_PATH --compare_to_input || exit_code=$?
- python3 -m pytest --tb=no $TEST_SUITE -v --create_cut --html=report-dut.html --self-contained-html --junit-xml=report-junit-dut.xml $comp_args -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH --compare_to_input || exit_code=$?
- zero_errors_ref=$(cat report-junit-ref.xml | grep -c 'errors="0"') || true
- zero_errors_dut=$(cat report-junit-dut.xml | grep -c 'errors="0"') || true
- python3 scripts/parse_xml_report.py report-junit-ref.xml report-ref.csv
- python3 scripts/parse_xml_report.py report-junit-dut.xml report-dut.csv
- python3 scripts/diff_report.py report-ref.csv report-dut.csv report-diff.csv
# create summary
- mkdir $IMAGES_ARTIFACT_NAME
- python3 scripts/create_histograms.py report-diff.csv $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT --write-out-histograms --no-bins
- python3 ci/basop-pages/create_summary_page.py $SUMMARY_HTML_ARTIFACT_NAME $CI_JOB_ID $CI_JOB_NAME $IMAGES_ARTIFACT_NAME --measures $MEASURES_FOR_REPORT
- exit 0
allow_failure:
exit_codes:
- 123
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- report-junit-ref.xml
- report-ref.html
- report-ref.csv
- report-junit-dut.xml
- report-dut.html
- report-dut.csv
- report-diff.csv
- $IMAGES_ARTIFACT_NAME
- $SUMMARY_HTML_ARTIFACT_NAME
expose_as: "pytest compare to input results"
reports:
junit:
- report-junit-ref.xml
- report-junit-dut.xml
# ---------------------------------------------------------------
# .pre jobs for setting up things
# ---------------------------------------------------------------
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/194023
# Solution to make main-branch pipelines uninterruptible while all other
# pipelines can be interrupted by default. This works because all jobs
# after an uninterruptible job are uninterruptible. The resource group
# setting avoids a rare case where two fast merges could still interrupt
# the pipeline. This should be revisited if there are updates to GitLab.
uninterruptible:
stage: .pre
interruptible: false
resource_group: uninterruptible
script:
- echo "$CI_COMMIT_BRANCH is uninterruptible"
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
tags:
- ivas-linux
# ---------------------------------------------------------------
# Validation jobs
# ---------------------------------------------------------------
# fail pipeline in the final stage for pipelines on Draft MRs
# this also only runs on Draft MRs, so should always fail
fail-pipeline-if-in-draft:
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
stage: postvalidate
tags:
- ivas-linux
script:
- echo "Your MR is still in Draft state, set it to ready to be mergable, then retrigger the pipeline."
- exit 1
# this branch runs on merges to main-pc only and will fail if the branch itself does not conform to the naming conventions
check-naming-of-branch-for-main-pc-merges:
extends:
- .rules-merge-request-to-main-pc
stage: prevalidate
tags:
- ivas-linux
script:
- *update-scripts-repo
- if [[ ! "$CI_MERGE_REQUEST_TITLE" =~ \[skip[[:space:]_-]name[[:space:]_-]check\] ]] && [[ ! "$CI_MERGE_REQUEST_TITLE" =~ \[CI\] ]]; then
- ci/get_float_ref_branch_name.sh $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
- fi
# ---------------------------------------------------------------
# verification jobs
# ---------------------------------------------------------------
branch-is-up-to-date-with-target-pre:
extends:
- .rules-merge-request
stage: prevalidate
needs: []
tags:
- ivas-linux
script:
- *get-commits-behind-count
- |
if [ $commits_behind_count -ne 0 ]; then
echo -e "Your branch is $commits_behind_count commits behind the target branch, run\n\tgit pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME\nto update."
exit 1
fi
branch-is-up-to-date-with-target-post:
extends:
- .rules-merge-request
stage: postvalidate
tags:
- ivas-linux
script:
- *get-commits-behind-count
- |
if [ $commits_behind_count -ne 0 ]; then
echo -e "Your branch is $commits_behind_count commits behind the target branch, possibly main changed during your pipeline run, run\n\tgit pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME\nto update."
exit 1
fi
clang-format-check:
extends:
- .test-job-linux
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
- if: $CI_PIPELINE_SOURCE == 'schedule'
when: never
variables:
ARTIFACT_BASE_NAME: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--formatting-fix"
stage: prevalidate
tags:
- ivas-linux
needs: []
timeout: "5 minutes"
script:
- *update-scripts-repo
# Set up variables. This can't be done in the "variables" section because variables are not expanded properly there
- PATCH_FILE_NAME="$ARTIFACT_BASE_NAME".patch
- >
INSTRUCTIONS_GITLAB="To fix formatting issues:\n
- download the diff patch available as artifact of this job\n
- unzip the artifact and place the patch file in the root directory of your local IVAS repo\n
- run: git apply $PATCH_FILE_NAME\n
- commit new changes"
- >
INSTRUCTIONS_README="To fix formatting issues:\n
- place the patch file in the root directory of your local IVAS repo\n
- run: git apply $PATCH_FILE_NAME\n
- commit new changes"
- format_problems=0
- scripts/check-format.sh -afD -p 8 || format_problems=$?
- if [ $format_problems == 0 ] ; then exit 0; fi
- mkdir tmp-formatting-fix
- git diff > "tmp-formatting-fix/$PATCH_FILE_NAME"
# Print instructions to job output
- echo -e "$INSTRUCTIONS_GITLAB"
# Include readme in the artifact, in case someone misses the job printout (e.g. getting the artifact via MR interface)
- echo -e "$INSTRUCTIONS_README" > "tmp-formatting-fix/readme.txt"
- exit $format_problems
artifacts:
expire_in: 1 day
paths:
- tmp-formatting-fix/
when: on_failure
name: "$ARTIFACT_BASE_NAME"
expose_as: "formatting patch"
# from float
# needs fix to be merged from floating point repo before it can be activated
.check-bitexactness-hrtf-rom-and-file:
extends:
- .test-job-linux
- .rules-merge-request-to-main-pc
stage: test
needs: ["build-codec-linux-make"]
timeout: "5 minutes"
script:
- *print-common-info
- *update-scripts-repo
- make clean
- make -j
- python3 tests/create_short_testvectors.py --cut_len 1.0
# TODO: run full test again once the custom binary files are supported
- python3 -m pytest -k "not diff_from_rom and not test_binary_file" tests/hrtf_binary_loading --html=report.html --junit-xml=report-junit.xml --self-contained-html
artifacts:
paths:
- report.html
- report-junit.xml
when: always
name: "$CI_JOB_NAME--$CI_MERGE_REQUEST_ID--sha-$CI_COMMIT_SHA--hrtf-loading"
expose_as: "logs-hrtf-loading"
expire_in: "5 days"
# ---------------------------------------------------------------
# Build jobs
# ---------------------------------------------------------------
# ensure that codec builds on linux
build-codec-linux-make:
rules:
- if: $CI_PIPELINE_SOURCE == 'web'
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs
- if: $CI_PIPELINE_SOURCE == 'schedule'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
extends:
- .build-job-linux
tags:
- ivas-linux
timeout: "10 minutes"
script:
- *print-common-info
- *activate-Werror-linux
- make -j
build-codec-linux-cmake:
rules:
- if: $CI_PIPELINE_SOURCE == 'web'
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs
- if: $CI_PIPELINE_SOURCE == 'schedule'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
extends:
- .build-job-linux
tags:
- ivas-linux
timeout: "10 minutes"
script:
- *print-common-info
- *update-scripts-repo
- cmake -B cmake-build -G "Unix Makefiles"
- cmake --build cmake-build -- -j
# ensure that codec builds on linux with instrumentation active
build-codec-linux-instrumented-make:
rules:
- if: $CI_PIPELINE_SOURCE == 'web'
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_PIPELINE_SOURCE == 'schedule'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
extends:
- .build-job-linux
timeout: "10 minutes"
tags:
- ivas-linux
script:
- *print-common-info
- *update-scripts-repo
- bash scripts/prepare_instrumentation.sh -m MEM_ONLY
- make -j -C $INSTR_DIR
# make sure that the codec builds with msan, asan and usan
build-codec-sanitizers-linux:
extends:
- .build-job-linux
- .rules-basis
tags:
- ivas-linux
timeout: "10 minutes"
script:
- *update-scripts-repo
- *print-common-info
# TODO: re-enable once all the warnings have been fixed
#- *activate-Werror-linux
- bash ci/build_codec_sanitizers_linux.sh
build-codec-windows-msbuild:
rules:
- if: $CI_PIPELINE_SOURCE == 'web'
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' # trigger build job for all MRs
- if: $CI_PIPELINE_SOURCE == 'schedule'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
extends:
- .build-job-windows
timeout: "10 minutes"
tags:
- ivas-windows
script:
- *print-common-info-windows
- *activate-WX-windows
- MSBuild.exe -maxcpucount .\Workspace_msvc\Workspace_msvc.sln /property:Configuration=Debug
build-codec-linux-debugging-make:
rules:
- if: $CI_PIPELINE_SOURCE == 'web'
- if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_PIPELINE_SOURCE == 'schedule'
- if: $CI_PIPELINE_SOURCE == 'push'
when: never
extends:
- .build-job-linux
timeout: "10 minutes"
variables:
BUILD_WITH_DEBUG_MODE_INFO: "true"
script:
- *print-common-info
- *activate-debug-mode-info-if-set
- make -j
# ---------------------------------------------------------------
# Test jobs for merge requests
# ---------------------------------------------------------------
split-rendering-smoke-test:
extends:
- .test-job-linux
- .rules-merge-request-to-main-pc
needs: ["build-codec-linux-make"]
stage: test
script:
- *print-common-info
- *update-scripts-repo
- make -j
- testcase_timeout=10
- python3 -m pytest -q -n auto -rA --junit-xml=report-junit.xml tests/split_rendering/test_split_rendering.py --testcase_timeout=$testcase_timeout
artifacts:
name: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--job-$CI_JOB_NAME--results"
expire_in: 1 week
when: always
paths:
- report-junit.xml
expose_as: "split rendering smoke results"
reports:
junit:
- report-junit.xml
lc3-wrapper-unit-test:
extends:
- .test-job-linux
- .rules-merge-request-to-main-pc
needs: ["build-codec-linux-make"]
stage: test
script:
- *print-common-info
- *update-scripts-repo
- cmake -B cmake-build -G "Unix Makefiles" -DCOPY_EXECUTABLES_FROM_BUILD_DIR=true
- cmake --build cmake-build -- -j
- scripts/split_rendering/lc3plus_basop/ivas_lc3plus_unit_test
# compare split-rendering bitexactness between target and source branch
split-rendering-pytest-on-merge-request:
extends:
- .test-job-linux-needs-testv-dir
- .rules-merge-request-to-main-pc
needs: ["build-codec-linux-make"]
timeout: "60 minutes"
stage: compare
script:
- *print-common-info
- *update-scripts-repo
- *get-commits-behind-count
- *check-commits-behind-count-in-compare-jobs
# some helper variables - "|| true" to prevent failures from grep not finding anything
# write to temporary file as workaround for failures observed with piping echo
- echo $CI_MERGE_REQUEST_TITLE > tmp.txt
- non_be_flag=$(grep -c --ignore-case "\[split*[ -]*non[ -]*be\]" tmp.txt) || true
# TODO: ref_using_target comes from float repo, but does not apply here - disable for now
# - ref_using_target=$(grep -c --ignore-case "\[ref[ -]*using[ -]*target\]" tmp.txt) || true
- ref_using_target=0
# store the current commit hash
- source_branch_commit_sha=$(git rev-parse HEAD)
- *mr-fetch-target-branch
- *mr-get-target-commit
- git checkout $target_commit
- echo "Building reference codec at commit $target_commit"
# build reference binaries
- make -j
- mv IVAS_cod IVAS_cod_ref
- mv IVAS_dec IVAS_dec_ref
- mv IVAS_rend IVAS_rend_ref
### If ref_using_target is not set, checkout the source branch to use scripts and input from there
- if [ $ref_using_target == 0 ]; then git restore lib_com/options.h; fi # Revert changes back before checking out another branch to avoid conflicts
- if [ $ref_using_target == 0 ]; then git checkout $source_branch_commit_sha; fi
- exit_code=0
- testcase_timeout=60
- python3 -m pytest -q --log-level ERROR -n auto -rA --html=report.html --self-contained-html --junit-xml=report-junit.xml tests/split_rendering/test_split_rendering.py --create_ref --testcase_timeout=$testcase_timeout || exit_code=$?
# back to source branch
- git restore lib_com/options.h # Revert changes back before checking out another branch to avoid conflicts
- git checkout $source_branch_commit_sha
- make clean
- make -j
### Run test using scripts and input from main
- if [ $ref_using_target == 1 ]; then git restore lib_com/options.h; fi # Revert changes back before checking out another branch to avoid conflicts
- if [ $ref_using_target == 1 ]; then git checkout $source_branch_commit_sha; fi
- comp_args="--mld --ssnr --odg"
- echo "$comp_args"
# run test
- python3 -m pytest -q --log-level ERROR -n auto -rA --html=report.html --self-contained-html --junit-xml=report-junit.xml tests/split_rendering/test_split_rendering.py $comp_args --create_cut --testcase_timeout=$testcase_timeout || exit_code=$?
- zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
- *merge-request-comparison-check
allow_failure:
exit_codes:
- 123
artifacts:
name: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--job-$CI_JOB_NAME--results"
expire_in: 2 weeks
when: always
paths:
- report-junit.xml
- report.html
expose_as: "pytest split rendering results"
reports:
junit:
- report-junit.xml
# ---------------------------------------------------------------
# Short test jobs that run in merge request pipelines
# ---------------------------------------------------------------
.set-reference-for-basop-port-branch: &set-reference-for-basop-port-branch
- if [ $CI_MERGE_REQUEST_TARGET_BRANCH_NAME = "main-pc" ]; then
- if [[ ! "$CI_MERGE_REQUEST_TITLE" =~ \[skip[[:space:]_-]name[[:space:]_-]check\] ]] && [[ ! "$CI_MERGE_REQUEST_TITLE" =~ \[CI\] ]]; then
- *update-scripts-repo
# a bit awkward: write to file + standard out first so that the error message is visible in case of failure. Then fill the variable from the file
- ci/get_float_ref_branch_name.sh $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME | tee tmp_ref_branch.txt
- FLOAT_REF_BRANCH=$(cat tmp_ref_branch.txt)
- else
- FLOAT_REF_BRANCH="float-pc"
- fi
- fi
### jobs that check for bitexactness of fx encoder and decoder
check-be-to-target-short-enc-0db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- MERGE_TARGET_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=1.0
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
check-be-to-target-short-enc-+10db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- MERGE_TARGET_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=3.162
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
check-be-to-target-short-enc--10db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- MERGE_TARGET_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=0.3162
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
check-be-to-target-short-dec-0db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- MERGE_TARGET_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=1.0
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
check-be-to-target-short-dec-+10db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- MERGE_TARGET_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=3.162
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
check-be-to-target-short-dec--10db:
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- MERGE_TARGET_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=0.3162
- rm -rf tests/dut tests/ref
<<: *check-be-to-target-anchor
### jobs that check for regressions on non-BE testcases
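# each regression job pulls the artifacts of the matching BE job via needs (presumably the pytest cache with the already generated outputs) so testcases do not have to be regenerated from scratch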
check-regressions-short-enc-0db:
stage: test
needs:
- job: "check-be-to-target-short-enc-0db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=1.0
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
check-regressions-short-enc-+10db:
stage: test
needs:
- job: "check-be-to-target-short-enc-+10db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=3.162
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
check-regressions-short-enc--10db:
stage: test
needs:
- job: "check-be-to-target-short-enc--10db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_DECODER_PATH=./IVAS_dec_ref
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=0.3162
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
check-regressions-short-dec-0db:
stage: test
needs:
- job: "check-be-to-target-short-dec-0db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=1.0
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
check-regressions-short-dec-+10db:
stage: test
needs:
- job: "check-be-to-target-short-dec-+10db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=3.162
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
check-regressions-short-dec--10db:
stage: test
needs:
- job: "check-be-to-target-short-dec--10db"
artifacts: true
extends:
- .rules-mr-to-main-or-main-pc
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- DUT_ENCODER_PATH=./IVAS_cod_ref
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=0.3162
- rm -rf tests/dut tests/ref
<<: *check-regressions-pytest-anchor
# ---------------------------------------------------------------
# Short test jobs for running from web interface or schedule
# ---------------------------------------------------------------
### jobs that test fx encoder -> flt decoder
ivas-pytest-compare_to_ref-short-enc:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- ENCODER_TEST="true"
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-anchor
ivas-pytest-compare_to_ref-short-enc-lev-10:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- ENCODER_TEST="true"
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=0.3162
<<: *ivas-pytest-anchor
ivas-pytest-compare_to_ref-short-enc-lev+10:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- *set-reference-for-basop-port-branch
- USE_LTV=0
- ENCODER_TEST="true"
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=3.162
<<: *ivas-pytest-anchor
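# going by the job names, CLANG_NUM selects the sanitizer build used by the anchor: 1 = MemorySanitizer, 2 = AddressSanitizer, 3 = UndefinedBehaviorSanitizer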
ivas-pytest-enc-msan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=1
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_ENCODER
<<: *ivas-pytest-sanitizers-anchor
ivas-pytest-enc-asan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=2
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_ENCODER
<<: *ivas-pytest-sanitizers-anchor
ivas-pytest-enc-usan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=3
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_ENCODER
<<: *ivas-pytest-sanitizers-anchor
### jobs that test flt encoder -> fx decoder
ivas-pytest-compare_to_ref-short-dec:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- USE_LTV=0
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-anchor
ivas-pytest-compare_to_ref-short-dec-lev-10:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- USE_LTV=0
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=0.3162
<<: *ivas-pytest-anchor
ivas-pytest-compare_to_ref-short-dec-lev+10:
extends:
- .rules-pytest-to-ref-short
- .test-job-linux
before_script:
- USE_LTV=0
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=3.162
<<: *ivas-pytest-anchor
### jobs that compare the output synthesis to the input files directly
ivas-pytest-compare-to-input-short-dec:
extends:
- .rules-pytest-to-input-short
- .test-job-linux
before_script:
- USE_LTV=0
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-compare-to-input-anchor
ivas-pytest-compare-to-input-short-enc:
extends:
- .rules-pytest-to-input-short
- .test-job-linux
before_script:
- USE_LTV=0
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$SHORT_TEST_SUITE_ENCODER"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-compare-to-input-anchor
ivas-pytest-dec-msan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=1
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_NO_RENDERER
<<: *ivas-pytest-sanitizers-anchor
ivas-pytest-dec-asan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=2
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_NO_RENDERER
<<: *ivas-pytest-sanitizers-anchor
ivas-pytest-dec-usan:
extends:
- .test-job-linux
tags:
- ivas-linux-fast
before_script:
- CLANG_NUM=3
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE=$LONG_TEST_SUITE_NO_RENDERER
<<: *ivas-pytest-sanitizers-anchor
# ---------------------------------------------------------------
# Long test jobs
# ---------------------------------------------------------------
ivas-pytest-compare_ref-long-enc:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE_ENCODER"
- LEVEL_SCALING=1.0
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
ivas-pytest-compare_ref-long-dec:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE"
- LEVEL_SCALING=1.0
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
ivas-pytest-compare_ref-long-enc-lev-10:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE_ENCODER"
- LEVEL_SCALING=0.3162
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
ivas-pytest-compare_ref-long-dec-lev-10:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE"
- LEVEL_SCALING=0.3162
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
ivas-pytest-compare_ref-long-enc-lev+10:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE_ENCODER"
- LEVEL_SCALING=3.162
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
ivas-pytest-compare_ref-long-dec-lev+10:
extends:
- .rules-pytest-long
- .test-job-linux
before_script:
- USE_LTV=1
- DUT_ENCODER_PATH=./$REF_ENCODER_PATH
- TEST_SUITE="$LONG_TEST_SUITE"
- LEVEL_SCALING=3.162
- SPLIT_COMPARISON="true"
<<: *ivas-pytest-anchor
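# LEVEL_SCALING=32768 scales the test vectors to and beyond full scale of 16-bit PCM (max 32767) to deliberately provoke saturation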
ivas-smoke-test-saturation:
extends:
- .rules-pytest-saturation-smoke-test
- .test-job-linux-needs-testv-dir
script:
- USE_LTV=1
- LEVEL_SCALING=32768
- *print-common-info
- *update-scripts-repo
- if [ $USE_LTV -eq 1 ]; then
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- fi
- if [ "$LEVEL_SCALING" != "1.0" ]; then
- *apply-testv-scaling
- fi
- cp -r scripts/testv/* $TESTV_DIR/
# skip prepare_mem_dryrun.py script in smoke_test.sh
- sed -i 's/python3 .\/scripts\/prepare_mem_dryrun.py/#python3 .\/scripts\/prepare_mem_dryrun.py/g' ci/smoke_test.sh
- bash ci/smoke_test.sh
### analyze for failures
- if ! [ -s smoke_test_output.txt ] || ! [ -s smoke_test_output_plc.txt ] || ! [ -s smoke_test_output_jbm_noEXT.txt ] || ! [ -s smoke_test_output_hrtf.txt ]; then echo "Error in smoke test"; exit 1; fi
- ret_val=0
- if grep -q "failed" smoke_test_output.txt; then echo "Smoke test without PLC failed"; ret_val=1; fi
- if grep -q "failed" smoke_test_output_plc.txt; then echo "Smoke test with PLC failed"; ret_val=1; fi
- if grep -q "failed" smoke_test_output_jbm_noEXT.txt; then echo "Smoke test JBM part failed"; ret_val=1; fi
- if grep -q "failed" smoke_test_output_hrtf.txt; then echo "Smoke test with external hrtf files failed"; ret_val=1; fi
- exit $ret_val
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- smoke_test_output.txt
- smoke_test_output_plc.txt
- smoke_test_output_jbm_noEXT.txt
- smoke_test_output_hrtf.txt
expose_as: "saturation smoke test results"
# GCOV/LCOV coverage analysis of self_test suite
coverage-test-on-main-scheduled:
extends:
- .test-job-linux
- .rules-coverage
stage: test
timeout: 3 hours
script:
- *print-common-info
- *update-scripts-repo
- *update-ltv-repo
- *copy-ltv-files-to-testv-dir
- *build-float-ref-binaries
# Build DuT binaries with GCOV
- make clean >> /dev/null
- make GCOV=1 -j
- cp IVAS_rend IVAS_rend_ref # Copy to ensure instrumented renderer is run in the first pytest call
- testcase_timeout=$TESTCASE_TIMEOUT_LTV
- exit_code_dec=0
- exit_code_enc=0
- python3 -m pytest --tb=no tests/codec_be_on_mr_nonselection tests/renderer --update_ref 1 -v --create_ref --html=report-dec.html --self-contained-html --junit-xml=report-junit-dec.xml -n auto --testcase_timeout $testcase_timeout --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $DUT_DECODER_PATH || exit_code_dec=$?
- lcov -c -d obj -o coverage_dec_rend.info # extract coverage of decoder/renderer
- python3 -m pytest --tb=no tests/codec_be_on_mr_nonselection --encoder_only -v --html=report-enc.html --self-contained-html --junit-xml=report-junit-enc.xml -n auto --testcase_timeout $testcase_timeout --dut_encoder_path $DUT_ENCODER_PATH || exit_code_enc=$?
- lcov -c -d obj -o coverage_enc_dec_rend.info # extract coverage of encoder/decoder/renderer
# remove apps and lib_util files from coverage
- lcov -r coverage_dec_rend.info "*apps*" -o coverage_dec_rend.info
- lcov -r coverage_dec_rend.info "*lib_util*" -o coverage_dec_rend.info
- lcov -r coverage_enc_dec_rend.info "*apps*" -o coverage_enc_dec_rend.info
- lcov -r coverage_enc_dec_rend.info "*lib_util*" -o coverage_enc_dec_rend.info
- commit_sha=$(git rev-parse HEAD)
- genhtml coverage_enc_dec_rend.info -o coverage_enc_dec_rend -t "Coverage on main enc/dec/rend @ $commit_sha"
- genhtml coverage_dec_rend.info -o coverage_dec_rend -t "Coverage on main -- dec/rend @ $commit_sha"
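# genhtml renders the lcov .info files into browsable HTML reports in the two output directories, exposed as artifacts below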
artifacts:
name: "main-coverage-sha-$CI_COMMIT_SHORT_SHA"
when: always
expire_in: 1 week
paths:
- coverage_enc_dec_rend.info
- coverage_dec_rend.info
- coverage_enc_dec_rend
- coverage_dec_rend
- report-dec.html
- report-enc.html
expose_as: "Coverage result"
reports:
junit:
- report-junit-dec.xml
- report-junit-enc.xml
# ---------------------------------------------------------------
# EVS 26.444 test job
# ---------------------------------------------------------------
# check bitexactness to EVS
be-2-evs-26444:
extends:
- .test-job-linux
rules:
- if: $CI_MERGE_REQUEST_TITLE =~ /^(\[Draft\]|\(Draft\)|Draft:)/
when: never
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "evs-26444"
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main-pc")
tags:
- be-2-evs-basop
stage: test
timeout: "120 minutes" # To be revisited
script:
- *print-common-info
- *update-scripts-repo
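# comment out "#define EVS_FLOAT" in lib_com/options.h so that the fixed-point (BASOP) code path is built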
- sed -i".bak" "s/\(#define EVS_FLOAT\)/\/\/\1/" lib_com/options.h
- make -j >> /dev/null
# copy so that the testvector dir itself is never modified
- cp -r $EVS_BE_TEST_DIR_BASOP ./evs_be_test
- mkdir -p ./evs_be_test/output/decoded ./evs_be_test/output/bitstreams
- exit_code=0
- python3 -m pytest tests/test_26444.py -v --html=report.html --self-contained-html --junit-xml=report-junit.xml -n auto || exit_code=$?
- if [ $exit_code -eq 1 ]; then echo "Differences encountered"; exit $EXIT_CODE_FAIL; fi
- exit 0
artifacts:
name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
expire_in: 1 week
when: always
paths:
- report-junit.xml
- report.html
expose_as: "EVS 26444 result"
reports:
junit:
- report-junit.xml
ivas-pytest-renderer:
extends:
- .test-job-linux
rules:
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "pytest-renderer"
before_script:
- USE_LTV=0
- TEST_SUITE="tests/renderer"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-anchor
peaq-enc-passthrough:
extends:
- .test-job-linux
rules:
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "peaq-enc-passthrough"
before_script:
- USE_LTV=0
- DUT_DECODER_PATH=./$REF_DECODER_PATH
- TEST_SUITE="tests/test_enc_passthrough.py"
- DELTA_ODG="true"
- LEVEL_SCALING=1.0
<<: *ivas-pytest-anchor
# ---------------------------------------------------------------
# Various other tests
# ---------------------------------------------------------------
# TODO: actually run on MR once main problems are fixed
voip-be-on-merge-request:
extends:
- .test-job-linux-needs-testv-dir
rules:
# - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "main" # only have MR pipelines for MRs to main
- if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "voip-be-test"
stage: test
needs: ["build-codec-linux-make"]
timeout: "10 minutes"
script:
- *print-common-info
- make clean
- make -j >> /dev/null
- python3 -m pytest tests/test_be_for_jbm_neutral_dly_profile.py
# ---------------------------------------------------------------
# Test jobs for merge requests
# ---------------------------------------------------------------
# test that runs all modes with 1s input signals
# TODO: disabled temporarily, needs to be adapted to BASOP
.codec-smoke-test:
extends:
- .test-job-linux-needs-testv-dir
- .rules-merge-request-no-draft
timeout: "20 minutes"
tags:
- ivas-linux
stage: test
needs: ["build-codec-linux-make"] #, "build-codec-instrumented-linux", "build-codec-sanitizers-linux"]
script:
- *print-common-info
# LTV update needed as ltv ISM metadata files are used
- *update-ltv-repo
- bash ci/smoke_test.sh
### analyze for failures
- if ! [ -s smoke_test_output.txt ] || ! [ -s smoke_test_output_jbm.txt ] || ! [ -s smoke_test_output_hrtf.txt ]; then echo "Error in smoke test"; exit 1; fi
- ret_val=0
- if grep -q "failed" smoke_test_output.txt; then echo "Smoke test without JBM failed"; ret_val=1; fi
- if grep -q "failed" smoke_test_output_jbm.txt; then echo "Smoke test JBM part failed"; ret_val=1; fi
- if grep -q "failed" smoke_test_output_hrtf.txt; then echo "Smoke test with external hrtf files failed"; ret_val=1; fi
- exit $ret_val
artifacts:
name: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--stage-$CI_JOB_STAGE--results"
expire_in: 1 week
when: always
paths:
- smoke_test_output.txt
- smoke_test_output_jbm.txt
- smoke_test_output_hrtf.txt
expose_as: "Smoke test results"
# compare renderer bitexactness between target and source branch
renderer-pytest-on-merge-request:
extends:
- .test-job-linux-needs-testv-dir
- .rules-merge-request-to-main-pc
needs: ["build-codec-linux-make"]
# TODO: set reasonable timeout, will most likely take less
timeout: "20 minutes"
tags:
- ivas-linux
stage: compare
script:
- *print-common-info
- *get-commits-behind-count
- *check-commits-behind-count-in-compare-jobs
- *merge-request-comparison-setup-codec
# some helper variables - "|| true" to prevent failures from grep not finding anything
# write to temporary file as workaround for failures observed with piping echo
- echo $CI_MERGE_REQUEST_TITLE > tmp.txt
- non_be_flag=$(grep -c --ignore-case "\[rend\(erer\)*[ -]*non[ -]*be\]" tmp.txt) || true
# TODO: ref_using_target comes from float repo, but does not apply here - disable for now
# - ref_using_target=$(grep -c --ignore-case "\[ref[ -]*using[ -]*target\]" tmp.txt) || true
- ref_using_target=0
### If ref_using_target is not set, checkout the source branch to use scripts and input from there
- if [ $ref_using_target == 0 ]; then git checkout $source_branch_commit_sha; fi
- exit_code=0
- testcase_timeout=60
- python3 -m pytest -q --log-level ERROR -n auto -rA --html=report.html --self-contained-html --junit-xml=report-junit.xml tests/renderer/test_renderer.py --create_ref --testcase_timeout=$testcase_timeout || exit_code=$?
### Run test using branch scripts and input
- if [ $ref_using_target == 1 ]; then git checkout $source_branch_commit_sha; fi
# run test
- python3 -m pytest -q --log-level ERROR -n auto -rA --html=report.html --self-contained-html --junit-xml=report-junit.xml tests/renderer/test_renderer.py --create_cut --testcase_timeout=$testcase_timeout || exit_code=$?
- zero_errors=$(grep -c 'errors="0"' report-junit.xml) || true
- *merge-request-comparison-check
allow_failure:
exit_codes:
- 123
artifacts:
name: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--job-$CI_JOB_NAME--results"
expire_in: 1 week
when: always
paths:
- report-junit.xml
- report.html
expose_as: "pytest renderer results"
reports:
junit:
- report-junit.xml
# compare bit exactness between target and source branch
ivas-pytest-on-merge-request:
extends:
- .test-job-linux-needs-testv-dir
- .rules-merge-request-to-main-pc
stage: compare
# TODO: broken dependency removed temporarily, see above
# note: this step doesn't really depend on codec-smoke-test
# it's just pointless to run this step when the smoke test fails and the smoke test should be reasonably fast
# thus, overall, this should save time
needs: ["build-codec-linux-make"] #, "codec-smoke-test"]
timeout: "40 minutes"
tags:
- ivas-linux
script:
- *print-common-info
- *get-commits-behind-count
- *check-commits-behind-count-in-compare-jobs
- *merge-request-comparison-setup-codec
- *remove_unsupported_testcases
# some helper variables - "|| true" to prevent failures from grep not finding anything
# write to temporary file as workaround for failures observed with piping echo
- echo $CI_MERGE_REQUEST_TITLE > tmp.txt
- non_be_flag=$(grep -c --ignore-case "\[non[ -]*be\]" tmp.txt) || true
# TODO: ref_using_target comes from float repo, but does not apply here - disable for now
# - ref_using_target=$(grep -c --ignore-case "\[ref[ -]*using[ -]*target\]" tmp.txt) || true
- ref_using_target=0
### If ref_using_target is not set, checkout the source branch to use scripts and input from there
- if [ $ref_using_target == 0 ]; then git checkout $source_branch_commit_sha; fi
### prepare pytest
# create references
- python3 -m pytest $TESTS_DIR_CODEC_BE_ON_MR -v --update_ref 1 $DISABLE_HRTF_MODEL_TESTS
### Run test using branch scripts and input
- if [ $ref_using_target == 1 ]; then git checkout $source_branch_commit_sha; fi
### run pytest
- exit_code=0
- testcase_timeout=600
- python3 -m pytest $TESTS_DIR_CODEC_BE_ON_MR -v --html=report.html --self-contained-html --junit-xml=report-junit.xml --testcase_timeout=$testcase_timeout $DISABLE_HRTF_MODEL_TESTS || exit_code=$?
- zero_errors=$(grep -c 'errors="0"' report-junit.xml) || true
- *merge-request-comparison-check
allow_failure:
exit_codes:
- 123
artifacts:
name: "mr-$CI_MERGE_REQUEST_IID--sha-$CI_COMMIT_SHORT_SHA--stage-$CI_JOB_STAGE--results"
expire_in: 1 week
when: always
paths:
- report-junit.xml
- report.html
expose_as: "pytest ivas results"
reports:
junit:
- report-junit.xml
# ---------------------------------------------------------------
# Complexity measurement jobs
# ---------------------------------------------------------------
.complexity-measurements-setup:
&complexity-measurements-setup # create necessary environment
### Part 1: mostly the same as in the float repo - boilerplate to make the GitLab Pages presentation work
- mkdir -p wmops/logs
- job_id=$(python3 ci/get_id_of_last_job_occurence.py $CI_COMMIT_REF_NAME $CI_JOB_NAME $CI_PROJECT_ID)
- echo $job_id
# this is a testing/maintenance mechanism to force getting the log history from a specific job id
# see below in the concrete complexity jobs
- if [ "$JOB_ID_INJECT" != "" ]; then job_id=$JOB_ID_INJECT; fi
- curl --silent --show-error --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts.zip
- unzip -qq artifacts.zip || true # this may fail on the first run, when there are no artifacts yet and the zip file is actually just a "404" HTML page
- public_dir="$CI_JOB_NAME-public"
# the if is needed to catch the case where no artifact exists yet (first run), similar to the above
- if [[ -d $public_dir ]]; then mv $public_dir/* wmops/; fi
- ls wmops
- rm artifacts.zip
- rm -rf $public_dir
### Part 1.5: get the corresponding measurement from ivas-float-update
- job_id=$(python3 ci/get_id_of_last_job_occurence.py ivas-float-update $CI_JOB_NAME $CI_PROJECT_ID)
- echo $job_id
- curl --silent --show-error --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" --output artifacts_ref.zip
- unzip -qq -j artifacts_ref.zip "*latest_WMOPS.csv" || true
# add file to arguments only if the artifact could be retrieved to prevent error later.
- if [ -f latest_WMOPS.csv ]; then GET_WMOPS_ARGS="$GET_WMOPS_ARGS latest_WMOPS.csv"; fi
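# net effect: each run seeds wmops/ with the previous run's public artifacts plus the latest float-reference measurement, so the graphs accumulate history across pipelines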
.complexity-measurements-prepare-artifacts:
&complexity-measurements-prepare-artifacts # prepare artifacts -> move to public directory
- public_dir="$CI_JOB_NAME-public"
- mkdir $public_dir
- mv -f wmops/log_*_all.txt ./*.js ${public_dir}/
# move logfiles for links
- mkdir $public_dir/logs
# first move logs
- log_files=$(grep -h logFile $public_dir/graphs*.js | sed "s/.*\(wmops_newsletter_.*\.csv\).*/\1/g")
- for f in $log_files; do [ -f wmops/logs/$f ] && mv wmops/logs/$f $public_dir/logs/$f; done
# copy index page blueprint
- cp ci/complexity_measurements/index_complexity.html ${public_dir}/index.html
# patch the format in the title
- sed -i "s/IVAS FORMAT/IVAS $in_format to $out_format/g" ${public_dir}/index.html
# do separately here to avoid overwrite complaints by mv
- mv -f ci/complexity_measurements/style.css ${public_dir}/
.complexity-measurements-report-summary: &complexity-measurements-report-summary
- *print-results-banner
- if [ $ret_val -eq 0 ]; then
- echo -e "No crashes occured.\nNo changes in complexity or memory usage (>1%) detected."
- elif [ $ret_val -eq 123 ]; then
- echo -e "Changes in complexity or memory usage (>1%) detected!!!\nNo crashes occured."
- else
- echo -e "Something went wrong in running the codec. Likely some modes were crashing."
- fi
.complexity-template:
extends:
- .test-job-linux
stage: test
variables:
ret_val: 0
GET_WMOPS_ARGS: "mem_only"
timeout: 3 hours 30 minutes
before_script:
- *print-common-info
- *update-scripts-repo
- *update-ltv-repo
- *build-float-ref-and-dut-binaries
- *complexity-measurements-setup
# delete the previous job's logfiles if present (the -f flag ensures a return value of 0 even on the first run, when this folder does not exist)
- rm -rf COMPLEXITY/logs
allow_failure:
exit_codes:
- 123
artifacts:
name: "$CI_JOB_NAME--$CI_COMMIT_REF_NAME--sha-$CI_COMMIT_SHA"
when: always
expire_in: 2 weeks
paths:
- $CI_JOB_NAME-public
- COMPLEXITY/logs
complexity-stereo-in-stereo-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
variables:
JOB_ID_INJECT: ""
script:
- in_format=stereo
- out_format=stereo
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
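# the remaining complexity jobs are delayed in increasing start_in steps, presumably to stagger the long-running measurements instead of launching them all at once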
complexity-ism-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 1 hour
variables:
JOB_ID_INJECT: ""
script:
- in_format=ISM
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-ism-in-binaural_room_ir-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 2 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=ISM
- out_format=BINAURAL_ROOM_IR
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-ism-in-ext-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 3 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=ISM
- out_format=EXT
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "ISM+1 ISM+2 ISM+3 ISM+4" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-sba-hoa3-in-hoa3-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 4 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=HOA3
- out_format=HOA3
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-sba-hoa3-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 5 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=HOA3
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-sba-hoa3-in-binaural_room_ir-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 6 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=HOA3
- out_format=BINAURAL_ROOM_IR
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-mc-in-7_1_4-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 7 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=MC
- out_format=7_1_4
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-mc-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 10 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=MC
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-mc-in-binaural_room_ir-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 12 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=MC
- out_format=BINAURAL_ROOM_IR
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-masa-in-ext-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 15 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=MASA
- out_format=EXT
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-masa-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 16 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=MASA
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-masa-in-hoa3-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 17 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=MASA
- out_format=HOA3
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
# complexity-omasa-in-ext-out:
# extends:
# - .complexity-template
# rules:
# - if: $MEASURE_COMPLEXITY_LINUX
# when: delayed
# start_in: 13 hours
# variables:
# JOB_ID_INJECT: ""
# script:
# - in_format=OMASA
# - out_format=EXT
# - ret_val=0
# - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
# - *complexity-measurements-prepare-artifacts
# - *complexity-measurements-report-summary
# - exit $ret_val
complexity-omasa-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 18 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=OMASA
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-omasa-in-hoa3-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 20 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=OMASA
- out_format=HOA3
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-StereoDmxEVS-stereo-in-mono-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 22 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=StereoDmxEVS
- out_format=mono
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
# complexity-osba-in-ext-out:
# extends:
# - .complexity-template
# rules:
# - if: $MEASURE_COMPLEXITY_LINUX
# when: delayed
# start_in: 17 hours
# script:
# - in_format=OSBA
# - out_format=EXT
# - ret_val=0
# - bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
# - *complexity-measurements-prepare-artifacts
# - *complexity-measurements-report-summary
# - exit $ret_val
complexity-osba-in-binaural-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 22 hours 30 minutes
variables:
JOB_ID_INJECT: ""
script:
- in_format=OSBA
- out_format=BINAURAL
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
complexity-osba-in-binaural_room_ir-out:
extends:
- .complexity-template
rules:
- if: $MEASURE_COMPLEXITY_LINUX
when: delayed
start_in: 25 hours
variables:
JOB_ID_INJECT: ""
script:
- in_format=OSBA
- out_format=BINAURAL_ROOM_IR
- ret_val=0
- bash ci/complexity_measurements/getWmops.sh "$in_format" "$out_format" $GET_WMOPS_ARGS || ret_val=$?
- *complexity-measurements-prepare-artifacts
- *complexity-measurements-report-summary
- exit $ret_val
# job that sets up gitlab pages website
pages:
stage: deploy
tags:
- ivas-linux
rules:
- if: $UPDATE_PAGES
script:
- *print-common-info
- *update-scripts-repo
- python3 ci/setup_pages.py
- ls
- ls -lh public
artifacts:
paths:
- public
expire_in: 1 day
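# GitLab Pages publishes whatever this job leaves in the public/ artifact directory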
# note: GitLab cannot reference user-defined variables in the include ref:, so we need to use a YAML anchor for this
# see https://docs.gitlab.com/ci/yaml/includes/#use-variables-with-include for more information
IVAS_CODEC_CI_REF: &IVAS_CODEC_CI_REF main
include:
- local: .gitlab-ci/variables.yml
- local: .gitlab-ci/rules-basis.yml
- project: ivas-codec-pc/ivas-codec-ci
ref: *IVAS_CODEC_CI_REF
file: main-basop.yml
variables:
TESTV_DIR: "/usr/local/testv"
LTV_DIR: "/usr/local/ltv"
EVS_BE_TEST_DIR_BASOP: "/usr/local/be_2_evs_basop"
# TODO: cleanup and use global var defined in ivas-codec-ci
FLOAT_REF_BRANCH: "ivas-float-update"
FLOAT_REF_BRANCH_MERGE_SOURCE: ""
BUILD_OUTPUT: "build_output.txt"
SCRIPTS_DIR: "/usr/local/scripts"
EXIT_CODE_NON_BE: 123
EXIT_CODE_FAIL: 1
LONG_TEST_SUITE: "tests/codec_be_on_mr_nonselection tests/renderer --param_file scripts/config/self_test_ltv.prm --use_ltv"
LONG_TEST_SUITE_NO_RENDERER: "tests/codec_be_on_mr_nonselection --param_file scripts/config/self_test_ltv.prm --use_ltv"
SHORT_TEST_SUITE: "tests/codec_be_on_mr_nonselection"
@@ -10,20 +17,42 @@ variables:
TEST_SUITE: ""
# These path variables are used by the pytest calls.
# They can be overwritten in the job templates to e.g. only test encoder or decoder in the chain
DUT_ENCODER_PATH: "./IVAS_cod"
DUT_DECODER_PATH: "./IVAS_dec"
DUT_ENCODER_PATH: "./IVAS_cod_dut"
DUT_DECODER_PATH: "./IVAS_dec_dut"
DUT_RENDERER_PATH: "./IVAS_rend_dut"
DUT_POST_RENDERER_PATH: "./ISAR_post_rend_dut"
REF_ENCODER_PATH: "./IVAS_cod_ref"
REF_DECODER_PATH: "./IVAS_dec_ref"
REF_RENDERER_PATH: "./IVAS_rend_ref"
REF_POST_RENDERER_PATH: "./ISAR_post_rend_ref"
MERGE_TARGET_ENCODER_PATH: "./IVAS_cod_merge_target"
MERGE_TARGET_DECODER_PATH: "./IVAS_dec_merge_target"
MERGE_TARGET_RENDERER_PATH: "./IVAS_rend_merge_target"
MERGE_TARGET_POST_RENDERER_PATH: "./ISAR_post_rend_merge_target"
MERGE_SOURCE_FLOAT_REF_ENCODER_PATH: "./IVAS_cod_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_DECODER_PATH: "./IVAS_dec_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_RENDERER_PATH: "./IVAS_rend_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_POST_RENDERER_PATH: "./ISAR_post_rend_merge_source_float_ref"
# These path variables are used for building the binaries
# They should never be overwritten!
DUT_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_dut"
DUT_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_dut"
DUT_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_rend_dut"
DUT_POST_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./ISAR_post_rend_dut"
REF_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_ref"
REF_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_ref"
REF_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_rend_ref"
REF_POST_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./ISAR_post_rend_ref"
MERGE_TARGET_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_merge_target"
MERGE_TARGET_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_merge_target"
MERGE_TARGET_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_rend_merge_target"
MERGE_TARGET_POST_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./ISAR_post_rend_merge_target"
MERGE_SOURCE_FLOAT_REF_ENCODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_cod_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_DECODER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_dec_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./IVAS_rend_merge_source_float_ref"
MERGE_SOURCE_FLOAT_REF_POST_RENDERER_PATH_FOR_BUILD_DO_NOT_MODIFY: "./ISAR_post_rend_merge_source_float_ref"
LEVEL_SCALING: "1.0"
BASOP_CI_BRANCH_PC_REPO: "basop-ci-branch"
IVAS_PIPELINE_NAME: ''
PRM_FILES: "scripts/config/self_test.prm scripts/config/self_test_ltv.prm"
TESTCASE_TIMEOUT_STV: 900
TESTCASE_TIMEOUT_LTV: 2400
@@ -32,7 +61,9 @@ variables:
CI_REGRESSION_THRESH_MAX_ABS_DIFF: "50"
CI_REGRESSION_THRESH_SSNR: "-1"
CI_REGRESSION_THRESH_ODG: "-0.05"
GIT_CLEAN_FLAGS: -ffdxq
INSTR_DIR: "scripts/c-code_instrument"
TESTS_DIR_CODEC_BE_ON_MR: "tests/codec_be_on_mr_nonselection"
BUILD_WITH_DEBUG_MODE_INFO: ""
ENCODER_TEST: ""
DELTA_ODG: ""
@@ -46,6 +77,10 @@ variables:
FLOAT_REF_COMMIT_FILE: "float-ref-git-sha.txt"
CUT_COMMIT_FILE: "CuT-git-sha.txt"
MERGE_TARGET_COMMIT_FILE: "merge-target-git-sha.txt"
MERGE_SOURCE_FLOAT_REF_COMMIT_FILE: "merge-source-float-ref-git-sha.txt"
RUNNER_TAG: "ivas-basop-linux"
LOGS_BACKUP_SOURCE_DIR: ""
LOGS_BACKUP_TARGET_DIR: ""
MANUAL_PIPELINE_TYPE:
description: "Type for the manual pipeline run. Use 'pytest-compare' to run comparison test against reference float codec."
value: 'default'
@@ -54,6 +89,7 @@ variables:
- 'pytest-compare'
- 'pytest-compare-enc-dmx'
- 'pytest-compare-long'
- 'pytest-compare-long-fx-fx'
- 'pytest-compare-to-input'
- 'pytest-saturation-smoke-test'
- 'evs-26444'
@@ -62,4 +98,7 @@ variables:
- 'complexity'
- 'coverage'
- 'voip-be-test'
- 'renderer-framesize-be'
- 'peaq-enc-passthrough'
- 'long-term-logs'
- 'backup-long-term-logs'
@@ -58,7 +58,7 @@ if(UNIX)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ffp-contract=off") # disable floating point operation contraction
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -pedantic -Wcast-qual -Wall -W -Wextra -Wno-long-long")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith -Wstrict-prototypes -Wmissing-prototypes")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-parameter")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-parameter -Wno-implicit-fallthrough")
# to be uncommented in CI
# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror")
@@ -166,9 +166,7 @@ int main(
IVAS_RENDER_FRAMESIZE asked_frame_size;
IVAS_DEC_HRTF_HANDLE *hHrtfTD = NULL;
IVAS_DEC_HRTF_CREND_HANDLE *hSetOfHRTF = NULL;
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
IVAS_DEC_HRTF_STATISTICS_HANDLE *hHrtfStatistics = NULL;
#endif
#ifdef WMOPS
reset_wmops();
@@ -586,16 +584,6 @@ int main(
}
}
#ifndef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
if ( ( *hHrtfTD != NULL ) && ( error = load_reverb_binary( *hHrtfTD, hrtfReader ) ) != IVAS_ERR_OK )
{
if ( error != IVAS_ERR_BINARY_FILE_WITHOUT_BINAURAL_RENDERER_DATA )
{
fprintf( stderr, "\nError in loading HRTF binary file %s \n\n", arg.hrtfFileName );
goto cleanup;
}
}
#endif
if ( ( error = IVAS_DEC_GetHrtfCRendHandle( hIvasDec, &hSetOfHRTF ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nIVAS_DEC_GetHrtfCRendHandle failed: %s\n\n", IVAS_DEC_GetErrorMessage( error ) );
@@ -654,7 +642,6 @@ int main(
destroy_parambin_hrtf( hHrtfParambin );
}
}
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
if ( ( error = IVAS_DEC_GetHrtfStatisticsHandle( hIvasDec, &hHrtfStatistics ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nIVAS_DEC_GetHrtfHandle failed: %s\n\n", IVAS_DEC_GetErrorMessage( error ) );
@@ -672,16 +659,6 @@
destroy_hrtf_statistics( hHrtfStatistics );
}
}
#else
if ( ( hHrtfTD != NULL ) && ( *hHrtfTD != NULL ) && ( error = load_reverb_binary( *hHrtfTD, hrtfReader ) ) != IVAS_ERR_OK )
{
if ( error != IVAS_ERR_BINARY_FILE_WITHOUT_BINAURAL_RENDERER_DATA )
{
fprintf( stderr, "\nError in loading HRTF binary file %s \n\n", arg.hrtfFileName );
goto cleanup;
}
}
#endif
}
/*------------------------------------------------------------------------------------------*
@@ -2830,7 +2807,14 @@ static ivas_error decodeVoIP(
}
vec_pos_update = ( vec_pos_update + 1 ) % vec_pos_len;
frame++;
#ifdef NONBE_FIX_864_JBM_RENDER_FRAMESIZE
if ( vec_pos_update == 0 )
{
systemTime_ms += vec_pos_len * systemTimeInc_ms;
}
#else
systemTime_ms += systemTimeInc_ms;
#endif
#ifdef WMOPS
update_mem();
@@ -2838,6 +2822,90 @@
#endif
}
#ifdef NONBE_FIX_864_JBM_RENDER_FRAMESIZE
int16_t nSamplesFlushed = 0;
/* decode and get samples */
if ( ( error = IVAS_DEC_Flush( hIvasDec, nOutSamples, pcmBuf, &nSamplesFlushed ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nError in IVAS_DEC_VoIP_Flush: %s\n", IVAS_DEC_GetErrorMessage( error ) );
goto cleanup;
}
if ( nSamplesFlushed )
{
/* Write current frame */
if ( ( error = AudioFileWriter_write( afWriter, pcmBuf, nSamplesFlushed * nOutChannels ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nOutput audio file writer error\n" );
goto cleanup;
}
/* Write ISm metadata to external file(s) */
if ( decodedGoodFrame && arg.outputConfig == IVAS_AUDIO_CONFIG_EXTERNAL )
{
if ( bsFormat == IVAS_DEC_BS_OBJ || bsFormat == IVAS_DEC_BS_MASA_ISM || bsFormat == IVAS_DEC_BS_SBA_ISM )
{
if ( ( error = IVAS_DEC_GetNumObjects( hIvasDec, &numObj ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nError in IVAS_DEC_GetNumObjects: %s\n", IVAS_DEC_GetErrorMessage( error ) );
goto cleanup;
}
for ( i = 0; i < numObj; ++i )
{
IVAS_ISM_METADATA IsmMetadata;
if ( ( error = IVAS_DEC_GetObjectMetadata( hIvasDec, &IsmMetadata, 0, i ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nError in IVAS_DEC_GetObjectMetadata: %s\n", IVAS_DEC_GetErrorMessage( error ) );
goto cleanup;
}
if ( ( IsmFileWriter_writeFrame( IsmMetadata, ismWriters[i] ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nError writing ISM metadata to file %s\n", IsmFileWriter_getFilePath( ismWriters[i] ) );
goto cleanup;
}
}
}
if ( bsFormat == IVAS_DEC_BS_MASA || bsFormat == IVAS_DEC_BS_MASA_ISM )
{
IVAS_MASA_DECODER_EXT_OUT_META_HANDLE hMasaExtOutMeta;
#ifdef NONBE_FIX_984_OMASA_EXT_OUTPUT
int16_t fullDelayNumSamples[3];
float delayMs;
/* delayNumSamples is zeroed, and delayNumSamples_orig is updated only on first good frame, so need to re-fetch delay info */
if ( ( error = IVAS_DEC_GetDelay( hIvasDec, fullDelayNumSamples, &delayTimeScale ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nUnable to get delay of decoder: %s\n", ivas_error_to_string( error ) );
}
#endif
if ( ( error = IVAS_DEC_GetMasaMetadata( hIvasDec, &hMasaExtOutMeta, 0 ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nError in IVAS_DEC_GetMasaMetadata: %s\n", IVAS_DEC_GetErrorMessage( error ) );
goto cleanup;
}
#ifdef NONBE_FIX_984_OMASA_EXT_OUTPUT
delayMs = (float) ( fullDelayNumSamples[0] ) / (float) ( delayTimeScale );
if ( ( error = MasaFileWriter_writeFrame( masaWriter, hMasaExtOutMeta, &delayMs ) ) != IVAS_ERR_OK )
#else
if ( ( error = MasaFileWriter_writeFrame( masaWriter, hMasaExtOutMeta ) ) != IVAS_ERR_OK )
#endif
{
fprintf( stderr, "\nError writing MASA metadata to file: %s\n", MasaFileWriter_getFilePath( masaWriter ) );
goto cleanup;
}
}
}
}
#endif
/*------------------------------------------------------------------------------------------*
* Add zeros at the end to have equal length of synthesized signals
*------------------------------------------------------------------------------------------*/
@@ -758,12 +758,7 @@ int main(
}
/* *** Encode one frame *** */
if ( ( error = IVAS_ENC_EncodeFrameToSerial( hIvasEnc, pcmBuf, pcmBufSize, bitStream, &numBits
#ifdef DBG_BITSTREAM_ANALYSIS
,
frame
#endif
) ) != IVAS_ERR_OK )
if ( ( error = IVAS_ENC_EncodeFrameToSerial( hIvasEnc, pcmBuf, pcmBufSize, bitStream, &numBits ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nencodeFrame failed: %s\n\n", IVAS_ENC_GetErrorMessage( error ) );
goto cleanup;
@@ -735,9 +735,7 @@ int main(
IVAS_DEC_HRTF_FASTCONV_HANDLE *hHrtfFastConv = NULL;
IVAS_DEC_HRTF_PARAMBIN_HANDLE *hHrtfParambin = NULL;
IVAS_DEC_HRTF_HANDLE *hHrtfTD = NULL;
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
IVAS_DEC_HRTF_STATISTICS_HANDLE *hHrtfStatistics = NULL;
#endif
IsmPositionProvider *positionProvider = NULL;
LfeRoutingConfig *lfeRoutingConfigs[RENDERER_MAX_MC_INPUTS];
RenderConfigReader *renderConfigReader = NULL;
@@ -992,16 +990,6 @@ int main(
}
}
#ifndef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
if ( ( hHrtfTD != NULL ) && ( error = load_reverb_binary( *hHrtfTD, hrtfFileReader ) ) != IVAS_ERR_OK )
{
if ( error != IVAS_ERR_BINARY_FILE_WITHOUT_BINAURAL_RENDERER_DATA )
{
fprintf( stderr, "\nError in loading HRTF binary file %s: %s \n\n", args.customHrtfFilePath, ivas_error_to_string( error ) );
goto cleanup;
}
}
#endif
if ( ( error = IVAS_REND_GetHrtfCRendHandle( hIvasRend, &hSetOfHRTF ) ) != IVAS_ERR_OK )
{
@@ -1062,7 +1050,6 @@
}
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
if ( ( error = IVAS_REND_GetHrtfStatisticsHandle( hIvasRend, &hHrtfStatistics ) ) != IVAS_ERR_OK )
{
fprintf( stderr, "\nIVAS_REND_GetHrtfStatisticsHandle failed\n\n" );
@@ -1080,7 +1067,6 @@
destroy_hrtf_statistics( hHrtfStatistics );
}
}
#endif
hrtfFileReader_close( &hrtfFileReader );
}
@@ -2107,9 +2093,7 @@ cleanup:
{
destroy_td_hrtf( hHrtfTD );
}
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
destroy_hrtf_statistics( hHrtfStatistics );
#endif
IVAS_REND_Close( &hIvasRend );
IsmPositionProvider_close( positionProvider );
RenderConfigReader_close( &renderConfigReader );
@@ -3582,13 +3582,13 @@ Word16 i_mult_o( Word16 a, Word16 b, Flag *Overflow )
#ifdef ORIGINAL_G7231
return a * b;
#else
register Word32 c = a * b;
register Word32 c = (Word32) a * b;
return saturate_o( c, Overflow );
#endif
}
Word16 i_mult( Word16 a, Word16 b )
{
return i_mult_o( a, b, NULL );
return i_mult_sat( a, b );
}
Word16 i_mult_sat( Word16 a, Word16 b )
{
@@ -1062,7 +1062,6 @@ Word32 BASOP_Util_Divide3232_Scale_cadence( Word32 x, Word32 y, Word16 *s )
return z;
}
#ifdef DIV32_OPT_NEWTON
Word32 div_w_newton( Word32 num, Word32 den );
/*
Table of 256 precalculated estimates to be used by the "div_w_newton"
@@ -1409,7 +1408,11 @@
*s = 0;
return ( (Word32) 0 );
}
#ifdef FIX_USAN_BASOP_UTIL_DIVIDE3232
IF( EQ_32( y, (Word32) 0x80000000 ) )
#else
IF( EQ_32( y, 0x80000000 ) )
#endif
{
/* Division by -1.0: same as negation of numerator */
/* Return normalized negated numerator */
@@ -1458,7 +1461,6 @@ Word32 BASOP_Util_Divide3232_Scale_newton( Word32 x, Word32 y, Word16 *s )
return z;
}
#endif /* DIV32_OPT_NEWTON */
Word16 BASOP_Util_Divide3232_Scale( Word32 x, Word32 y, Word16 *s )
{
@@ -1914,6 +1916,26 @@ Word16 findIndexOfMinWord32( Word32 *x, const Word16 len )
return indx;
}
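/* same as findIndexOfMinWord32 above, but for Word64 values: returns the index of the minimum element in x[0..len-1] */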
Word16 findIndexOfMinWord64( Word64 *x, const Word16 len )
{
Word16 i, indx;
indx = 0;
move16();
FOR( i = 1; i < len; i++ )
{
if ( LT_64( x[i], x[indx] ) )
{
indx = i;
move16();
}
}
return indx;
}
Word16 imult1616( Word16 x, Word16 y )
{
@@ -333,11 +333,9 @@ Word32 BASOP_Util_Divide3232_Scale_cadence( Word32 x, /*!< i : Numerator*/
Word16 *s ); /*!< o : Additional scalefactor difference*/
#ifdef DIV32_OPT_NEWTON
Word32 BASOP_Util_Divide3232_Scale_newton( Word32 x, /*!< i : Numerator*/
Word32 y, /*!< i : Denominator*/
Word16 *s ); /*!< o : Additional scalefactor difference*/
#endif
/************************************************************************/
@@ -538,6 +536,7 @@ Word16 findIndexOfMinWord16( Word16 *x, const Word16 len );
\return index of min Word32
*/
Word16 findIndexOfMinWord32( Word32 *x, const Word16 len );
Word16 findIndexOfMinWord64( Word64 *x, const Word16 len );
/****************************************************************************/
/*!
@@ -48,9 +48,12 @@
#include "wmc_auto.h"
#include "ivas_prot_fx.h"
#include "prot_fx_enc.h"
#ifdef DEBUGGING
#include "debug.h"
#ifdef DBG_BITSTREAM_ANALYSIS
#include <string.h>
#endif
#endif
#define STEP_MAX_NUM_INDICES 100 /* increase the maximum number of allowed indices in the list by this amount */
@@ -201,7 +204,7 @@ ivas_error ind_list_realloc(
{
new_ind_list[i].id = old_ind_list[i].id;
new_ind_list[i].value = old_ind_list[i].value;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( new_ind_list[i].function_name, old_ind_list[i].function_name, 100 );
#endif
move16();
@@ -215,7 +218,7 @@
FOR( ; i < max_num_indices; i++ )
{
new_ind_list[i].nb_bits = -1;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
sprintf( new_ind_list[i].function_name, "RESET in ind_list_realloc" );
#endif
move16();
@@ -808,7 +811,7 @@ void move_indices(
new_ind_list[i].value = old_ind_list[i].value;
new_ind_list[i].nb_bits = old_ind_list[i].nb_bits;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( new_ind_list[i].function_name, old_ind_list[i].function_name, 100 );
#endif
old_ind_list[i].nb_bits = -1;
@@ -821,12 +824,12 @@
new_ind_list[i].id = old_ind_list[i].id;
new_ind_list[i].value = old_ind_list[i].value;
new_ind_list[i].nb_bits = old_ind_list[i].nb_bits;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( new_ind_list[i].function_name, old_ind_list[i].function_name, 100 );
#endif
old_ind_list[i].nb_bits = -1;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
sprintf( old_ind_list[i].function_name, "RESET in move_indices" );
#endif
}
@@ -897,7 +900,7 @@ ivas_error check_ind_list_limits(
return error;
}
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
const char *named_indices_table[] = {
"IND_IVAS_FORMAT",
"IND_SMODE_OMASA",
@@ -2817,7 +2820,7 @@ ivas_error push_indice(
hBstr->ind_list[j].id = hBstr->ind_list[j - 1].id;
hBstr->ind_list[j].nb_bits = hBstr->ind_list[j - 1].nb_bits;
hBstr->ind_list[j].value = hBstr->ind_list[j - 1].value;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( hBstr->ind_list[j].function_name, hBstr->ind_list[j - 1].function_name, 100 );
#endif
move16();
@@ -2831,7 +2834,7 @@ ivas_error push_indice(
hBstr->ind_list[i].id = id;
hBstr->ind_list[i].value = value;
hBstr->ind_list[i].nb_bits = nb_bits;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( hBstr->ind_list[i].function_name, named_indices_table[id], 100 );
#endif
move16();
@@ -2852,13 +2855,11 @@
*
* Push a new indice into the buffer at the next position
*-------------------------------------------------------------------*/
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
ivas_error push_next_indice_(
const char *caller,
#else
ivas_error push_next_indice(
#endif
#ifdef DBG_BITSTREAM_ANALYSIS
const char *caller,
#endif
BSTR_ENC_HANDLE hBstr, /* i/o: encoder bitstream handle */
UWord16 value, /* i : value of the quantized indice */
@@ -2897,7 +2898,7 @@
move16();
move16();
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( hBstr->ind_list[hBstr->nb_ind_tot].function_name, caller, 100 );
#endif
@@ -2914,13 +2915,11 @@
* push_next_bits()
* Push a bit buffer into the buffer at the next position
*-------------------------------------------------------------------*/
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
ivas_error push_next_bits_(
const char *caller,
#else
ivas_error push_next_bits(
#endif
#ifdef DBG_BITSTREAM_ANALYSIS
const char *caller,
#endif
BSTR_ENC_HANDLE hBstr, /* i/o: encoder bitstream handle */
const UWord16 bits[], /* i : bit buffer to pack, sequence of single bits */
@@ -2965,7 +2964,7 @@
ptr->value = code;
ptr->nb_bits = 16;
ptr->id = prev_id;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( ptr->function_name, caller, 100 );
#endif
hBstr->nb_ind_tot = add( hBstr->nb_ind_tot, 1 );
@@ -2989,7 +2988,7 @@
ptr->value = bits[i];
ptr->nb_bits = 1;
ptr->id = prev_id;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( ptr->function_name, caller, 100 );
#endif
hBstr->nb_ind_tot = add( hBstr->nb_ind_tot, 1 );
@@ -3066,7 +3065,7 @@ UWord16 delete_indice(
hBstr->ind_list[j].id = hBstr->ind_list[i].id;
hBstr->ind_list[j].value = hBstr->ind_list[i].value;
hBstr->ind_list[j].nb_bits = hBstr->ind_list[i].nb_bits;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
strncpy( hBstr->ind_list[j].function_name, hBstr->ind_list[i].function_name, 100 );
#endif
}
@@ -3080,7 +3079,7 @@
{
/* reset the shifted indices at the end of the list */
hBstr->ind_list[j].nb_bits = -1;
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
sprintf( hBstr->ind_list[j].function_name, "RESET in delete_indice" );
#endif
}
@@ -3319,10 +3318,6 @@ static ivas_error write_indices_element_fx(
UWord16 **pt_stream, /* i : pointer to bitstream buffer */
const Word16 is_SCE, /* i : flag to distinguish SCE and CPE */
const Word16 element_id /* i : id of the SCE or CPE */
#ifdef DBG_BITSTREAM_ANALYSIS
,
int32_t frame
#endif
)
{
Word16 ch;
@@ -3429,7 +3424,7 @@
}
}
#ifdef DBG_BITSTREAM_ANALYSIS
#if defined( DEBUGGING ) && defined( DBG_BITSTREAM_ANALYSIS )
if ( is_SCE ) /* EVS and SCE */
{
static FILE *f1 = 0;
@@ -3539,10 +3534,6 @@ ivas_error write_indices_ivas_fx(
Encoder_Struct *st_ivas, /* i/o: encoder state structure */
UWord16 *bit_stream, /* i/o: output bitstream */
UWord16 *num_bits /* i : number of indices written to output */
#ifdef DBG_BITSTREAM_ANALYSIS
,
int32_t frame
#endif
)
{
Word16 i, n;
@@ -3566,22 +3557,12 @@
FOR( n = 0; n < st_ivas->nSCE; n++ )
{
write_indices_element_fx( st_ivas, &pt_stream, 1, n
#ifdef DBG_BITSTREAM_ANALYSIS
,
frame
#endif
);
write_indices_element_fx( st_ivas, &pt_stream, 1, n );
}
FOR( n = 0; n < st_ivas->nCPE; n++ )
{
write_indices_element_fx( st_ivas, &pt_stream, 0, n
#ifdef DBG_BITSTREAM_ANALYSIS
,
frame
#endif
);
write_indices_element_fx( st_ivas, &pt_stream, 0, n );
}
*num_bits = (UWord16) ( pt_stream - bit_stream );
@@ -242,130 +242,6 @@ Word16 rate2EVSmode(
*
* Push a new indice into the buffer
*-------------------------------------------------------------------*/
#ifndef HARM_PUSH_BIT
void push_indice_fx(
BSTR_ENC_HANDLE hBstr, /* i/o: encoder state structure */
Word16 id, /* i : ID of the indice */
UWord16 value, /* i : value of the quantized indice */
Word16 nb_bits /* i : number of bits used to quantize the indice */
)
{
Word16 i;
IF( EQ_16( hBstr->last_ind_fx, id ) )
{
/* indice with the same name as the previous one */
i = hBstr->next_ind_fx;
move16();
}
ELSE
{
/* new indice - find an empty slot in the list */
i = id;
move16();
WHILE( hBstr->ind_list[i].nb_bits != -1 )
{
i = add( i, 1 );
}
}
/* store the values in the list */
hBstr->ind_list[i].value = value;
move16();
hBstr->ind_list[i].nb_bits = nb_bits;
move16();
/* updates */
hBstr->next_ind_fx = add( i, 1 );
move16();
hBstr->last_ind_fx = id;
move16();
hBstr->nb_bits_tot = add( hBstr->nb_bits_tot, nb_bits );
move16();
return;
}
/*-------------------------------------------------------------------*
* push_next_indice_fx()
* Push a new indice into the buffer at the next position
*-------------------------------------------------------------------*/
void push_next_indice_fx(
BSTR_ENC_HANDLE hBstr, /* i/o: encoder state structure */
UWord16 value, /* i : value of the quantized indice */
Word16 nb_bits /* i : number of bits used to quantize the indice */
)
{
/* store the values in the list */
hBstr->ind_list[hBstr->next_ind_fx].value = value;
move16();
hBstr->ind_list[hBstr->next_ind_fx].nb_bits = nb_bits;
move16();
hBstr->next_ind_fx = add( hBstr->next_ind_fx, 1 );
move16();
/* update the total number of bits already written */
hBstr->nb_bits_tot = add( hBstr->nb_bits_tot, nb_bits );
move16();
return;
}
/*-------------------------------------------------------------------*
* push_next_bits_fx()
* Push a bit buffer into the buffer at the next position
*-------------------------------------------------------------------*/
void push_next_bits_fx(
BSTR_ENC_HANDLE hBstr, /* i/o: encoder state structure */
Word16 bits[], /* i : bit buffer to pack, sequence of single bits */
Word16 nb_bits /* i : number of bits to pack */
)
{
UWord16 code;
Word16 i, nb_bits_m15;
Indice *ptr;
ptr = &hBstr->ind_list[hBstr->next_ind_fx];
nb_bits_m15 = sub( nb_bits, 15 );
i = 0;
move16();
IF( nb_bits_m15 > 0 )
{
FOR( ; i < nb_bits_m15; i += 16 )
{
code = s_or( lshl( bits[i], 15 ), s_or( lshl( bits[i + 1], 14 ), s_or( lshl( bits[i + 2], 13 ), s_or( lshl( bits[i + 3], 12 ),
s_or( lshl( bits[i + 4], 11 ), s_or( lshl( bits[i + 5], 10 ), s_or( lshl( bits[i + 6], 9 ), s_or( lshl( bits[i + 7], 8 ),
s_or( lshl( bits[i + 8], 7 ), s_or( lshl( bits[i + 9], 6 ), s_or( lshl( bits[i + 10], 5 ), s_or( lshl( bits[i + 11], 4 ),
s_or( lshl( bits[i + 12], 3 ), s_or( lshl( bits[i + 13], 2 ), s_or( lshl( bits[i + 14], 1 ), bits[i + 15] ) ) ) ) ) ) ) ) ) ) ) ) ) ) );
ptr->value = code;
move16();
ptr->nb_bits = 16;
move16();
++ptr;
}
}
IF( LT_16( i, nb_bits ) )
{
FOR( ; i < nb_bits; ++i )
{
ptr->value = bits[i];
move16();
ptr->nb_bits = 1;
move16();
++ptr;
}
}
hBstr->next_ind_fx = (Word16) ( ptr - hBstr->ind_list );
move16();
hBstr->nb_bits_tot = add( hBstr->nb_bits_tot, nb_bits );
move16();
}
#endif
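For reference, the unrolled s_or/lshl cascade in push_next_bits_fx() packs 16 single bits MSB-first into one 16-bit code word per iteration, with any remainder stored one bit per indice. A minimal plain-C sketch of the same packing (illustrative only, not codec code; the function name and types are assumptions):
#include <stdint.h>
/* Sketch: MSB-first packing of nb_bits single bits (values 0/1) into
   16-bit words, equivalent to the unrolled cascade above. Returns the
   number of full words written; leftover bits would be stored one per
   word, as in the tail loop of push_next_bits_fx(). */
static int pack_bits16( const int16_t *bits, int nb_bits, uint16_t *out )
{
    int n_words = 0;
    int i, k;
    for ( i = 0; i + 16 <= nb_bits; i += 16 )
    {
        uint16_t code = 0;
        for ( k = 0; k < 16; k++ )
        {
            code = (uint16_t) ( ( code << 1 ) | ( bits[i + k] & 1 ) ); /* MSB first */
        }
        out[n_words++] = code;
    }
    return n_words;
}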
/*-------------------------------------------------------------------*
* get_next_indice_fx( )
......@@ -528,12 +404,6 @@ void reset_indices_enc_fx(
move16();
hBstr->nb_bits_tot = 0;
move16();
#ifndef HARM_PUSH_BIT
hBstr->next_ind_fx = 0;
move16();
hBstr->last_ind_fx = -1;
move16();
#endif
FOR( i = 0; i < max_num_indices; i++ )
{
hBstr->ind_list[i].nb_bits = -1;
......@@ -564,215 +434,11 @@ void reset_indices_dec_fx(
*
* Write the buffer of indices to a file
*-------------------------------------------------------------------*/
#ifndef HARM_PUSH_BIT
void write_indices_fx(
Encoder_State *st_fx, /* i/o: encoder state structure */
BSTR_ENC_HANDLE hBstr, /* i/o: encoder state structure */
FILE *file, /* i : output bitstream file */
UWord8 *pFrame, /* i: byte array with bit packet and byte aligned coded speech data */
Word16 pFrame_size /* i: size of the binary encoded access unit [bits] */
)
{
Word16 i, k;
Word16 stream[2 + MAX_BITS_PER_FRAME], *pt_stream;
Word32 mask;
UWord8 header;
Word16 isAmrWb = 0;
move16();
IF( st_fx->bitstreamformat == G192 )
{
/*-----------------------------------------------------------------*
* Encode Sync Header and Frame Length
*-----------------------------------------------------------------*/
pt_stream = stream;
FOR( i = 0; i < ( 2 + MAX_BITS_PER_FRAME ); ++i )
{
stream[i] = 0;
move16();
}
*pt_stream++ = SYNC_GOOD_FRAME;
move16();
*pt_stream++ = hBstr->nb_bits_tot;
move16();
/*----------------------------------------------------------------*
* Bitstream packing (conversion of individual indices into a serial stream)
* Writing the serial stream into file
*----------------------------------------------------------------*/
FOR( i = 0; i < MAX_NUM_INDICES; i++ )
{
IF( NE_16( hBstr->ind_list[i].nb_bits, -1 ) )
{
/* mask from MSB to LSB */
mask = L_shl( 1, sub( hBstr->ind_list[i].nb_bits, 1 ) );
/* write bit by bit */
FOR( k = 0; k < hBstr->ind_list[i].nb_bits; k++ )
{
IF( L_and( hBstr->ind_list[i].value, mask ) )
{
*pt_stream++ = G192_BIN1;
move16();
}
ELSE
{
*pt_stream++ = G192_BIN0;
move16();
}
mask = L_shr( mask, 1 );
}
}
}
}
ELSE
{
/* Create and write ToC header */
/* qbit is always set to 1 on the encoder side for AMR-WB IO; EVS does not use the qbit, but it is set to 0 (bad) */
header = (UWord8) ( s_or( s_or( shl( st_fx->Opt_AMR_WB, 5 ), shl( st_fx->Opt_AMR_WB, 4 ) ), rate2EVSmode( L_mult0( hBstr->nb_bits_tot, 50 ), &isAmrWb ) ) );
move16();
fwrite( &header, sizeof( UWord8 ), 1, file );
/* Write speech bits */
fwrite( pFrame, sizeof( UWord8 ), shr( add( pFrame_size, 7 ), 3 ), file );
}
/* Clearing of indices */
FOR( i = 0; i < MAX_NUM_INDICES; i++ )
{
hBstr->ind_list[i].nb_bits = -1;
move16();
}
IF( st_fx->bitstreamformat == G192 )
{
/* write the serial stream into file */
fwrite( stream, sizeof( unsigned short ), 2 + stream[1], file );
}
/* reset index pointers */
hBstr->nb_bits_tot = 0;
move16();
hBstr->next_ind_fx = 0;
move16();
hBstr->last_ind_fx = -1;
move16();
return;
}
#endif
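write_indices_fx() above serializes the indices into the ITU-T G.192 softbit format: one sync word, one word holding the frame length in bits, then one 16-bit word per payload bit. A hedged sketch of that framing, using the conventional G.192 constant values (0x6B21 good-frame sync, 0x0081 for '1', 0x007F for '0' — assumed here; the codec takes them from its own headers):
#include <stdio.h>
/* Sketch only: G.192 softbit framing as produced by the G192 branch of
   write_indices_fx(). Constant values are the conventional ITU-T G.192
   ones and are an assumption. */
#define SKETCH_SYNC_GOOD_FRAME 0x6B21
#define SKETCH_G192_BIN0 0x007F
#define SKETCH_G192_BIN1 0x0081
static void write_g192_frame( FILE *f, const unsigned char *payload_bits, unsigned short nb_bits )
{
    unsigned short w = SKETCH_SYNC_GOOD_FRAME;
    unsigned short k;
    fwrite( &w, sizeof( w ), 1, f );             /* sync word */
    fwrite( &nb_bits, sizeof( nb_bits ), 1, f ); /* frame length in bits */
    for ( k = 0; k < nb_bits; k++ )
    {
        w = payload_bits[k] ? SKETCH_G192_BIN1 : SKETCH_G192_BIN0; /* one word per bit */
        fwrite( &w, sizeof( w ), 1, f );
    }
}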
/*-------------------------------------------------------------------*
* write_indices_buf_fx()
*
* Write the buffer of indices to an output buffer
*-------------------------------------------------------------------*/
#ifndef HARM_PUSH_BIT
void write_indices_buf_fx(
Encoder_State *st_fx, /* i/o: encoder state structure */
BSTR_ENC_HANDLE hBstr, /* i/o: encoder state structure */
UWord16 *out_buf, /* i : output bitstream buf */
UWord8 *pFrame, /* i: byte array with bit packet and byte aligned coded speech data */
Word16 pFrame_size, /* i: size of the binary encoded access unit [bits] */
UWord16 *num_bits )
{
Word16 i, k;
Word16 stream[2 + MAX_BITS_PER_FRAME], *pt_stream;
Word32 mask;
UWord8 header;
Word16 isAmrWb = 0;
IF( st_fx->bitstreamformat == G192 )
{
/*-----------------------------------------------------------------*
* Encode Sync Header and Frame Length
*-----------------------------------------------------------------*/
pt_stream = stream;
FOR( i = 0; i < ( 2 + MAX_BITS_PER_FRAME ); ++i )
{
stream[i] = 0;
move16();
}
//*pt_stream++ = (Word16) SYNC_GOOD_FRAME;
//*pt_stream++ = hBstr->nb_bits_tot;
*num_bits = hBstr->nb_bits_tot;
move16();
/*----------------------------------------------------------------*
* Bitstream packing (conversion of individual indices into a serial stream)
* Writing the serial stream into file
*----------------------------------------------------------------*/
FOR( i = 0; i < MAX_NUM_INDICES; i++ )
{
IF( NE_16( hBstr->ind_list[i].nb_bits, -1 ) )
{
/* mask from MSB to LSB */
mask = L_shl( 1, ( sub( hBstr->ind_list[i].nb_bits, 1 ) ) );
/* write bit by bit */
FOR( k = 0; k < hBstr->ind_list[i].nb_bits; k++ )
{
IF( L_and( hBstr->ind_list[i].value, mask ) )
{
//*pt_stream++ = G192_BIN1;
*pt_stream++ = 1;
move16();
}
ELSE
{
//*pt_stream++ = G192_BIN0;
*pt_stream++ = 0;
move16();
}
mask = L_shr( mask, 1 );
}
}
}
}
ELSE
{
/* Create and write ToC header */
/* qbit is always set to 1 on the encoder side for AMR-WB IO; EVS does not use the qbit, but it is set to 0 (bad) */
header = (UWord8) ( s_or( s_or( shl( st_fx->Opt_AMR_WB, 5 ), shl( st_fx->Opt_AMR_WB, 4 ) ), rate2EVSmode( i_mult( hBstr->nb_bits_tot, 50 ), &isAmrWb ) ) );
// fwrite(&header, sizeof(UWord8), 1, file);
memcpy( out_buf, &header, sizeof( UWord8 ) );
*num_bits += sizeof( UWord8 );
/* Write speech bits */
// fwrite(pFrame, sizeof(UWord8), (pFrame_size + 7) >> 3, file);
memcpy( (UWord8 *) out_buf + 1, pFrame, sizeof( UWord8 ) * ( shr( add( pFrame_size, 7 ), 3 ) ) ); /* offset by the ToC byte so the header is not overwritten */
*num_bits += sizeof( UWord8 ) * ( shr( ( add( pFrame_size, 7 ) ), 3 ) );
}
/* Clearing of indices */
FOR( i = 0; i < MAX_NUM_INDICES; i++ )
{
hBstr->ind_list[i].nb_bits = -1;
move16();
}
IF( st_fx->bitstreamformat == G192 )
{
/* write the serial stream into file */
// fwrite(stream, sizeof(unsigned short), 2 + stream[1], file);
// FILE *ftemp = fopen( "./output/bitstreams/out.COD", "ab" );
// fwrite( stream, sizeof( unsigned short ), 2 + stream[1], ftemp );
// fclose( ftemp );
memcpy( out_buf, stream, sizeof( unsigned short ) * ( *num_bits ) );
//*num_bits += sizeof( unsigned short ) * ( 2 + stream[1] );
}
/* reset index pointers */
hBstr->nb_bits_tot = 0;
move16();
hBstr->next_ind_fx = 0;
move16();
hBstr->last_ind_fx = -1;
move16();
return;
}
#endif
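Both the file and buffer variants assemble the ToC byte the same way: bit 5 carries the AMR-WB IO flag, bit 4 the qbit (1 for AMR-WB IO on the encoder side, 0 otherwise), and the low bits the mode index that rate2EVSmode() derives from the frame bit count. A small sketch of the layout (the helper name is illustrative):
/* Sketch only: ToC byte layout as assembled in write_indices_fx() /
   write_indices_buf_fx(); make_toc_byte() is an illustrative helper,
   with mode_index standing in for the rate2EVSmode() result. */
static unsigned char make_toc_byte( int opt_amr_wb, unsigned char mode_index )
{
    unsigned char toc = 0;
    toc |= (unsigned char) ( ( opt_amr_wb & 1 ) << 5 ); /* AMR-WB IO flag */
    toc |= (unsigned char) ( ( opt_amr_wb & 1 ) << 4 ); /* qbit: 1 for AMR-WB IO */
    toc |= (unsigned char) ( mode_index & 0x0F );       /* mode index */
    return toc;
}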
/*-------------------------------------------------------------------*
* indices_to_serial()
*
......
......@@ -45,20 +45,22 @@
#define MATRIX_CONSTANT (759250113)
#define NUM_SAMPLES_960 (960)
#define NUM_SAMPLES_720 (720)
#define NUM_SAMPLES_640 (640)
#define NUM_SAMPLES_320 (320)
#define NUM_SAMPLES_160 (160)
#define L_SUBFRAME_48k (240)
#define L_SUBFRAME_32k (180)
#define L_SUBFRAME_16k (80)
#define L_SUBFRAME_8k (40)
#define Q31_BY_NUM_SAMPLES_960 ( 2239294 )
#define Q31_BY_NUM_SAMPLES_720 ( 2986764 )
#define Q31_BY_NUM_SAMPLES_320 ( 6731924 )
#define Q31_BY_NUM_SAMPLES_160 ( 13506186 )
#define Q31_BY_SUB_FRAME_240 ( 8985287 )
#define Q31_BY_SUB_FRAME_180 ( 11997115 )
#define Q31_BY_SUB_FRAME_80 ( 27183337 )
#define Q31_BY_SUB_FRAME_40 ( 55063683 )
#define Q31_BY_NUM_SAMPLES_960 ( 2239294 ) /* 1/959 in Q31 */
#define Q31_BY_NUM_SAMPLES_720 ( 2986764 ) /* 1/719 in Q31 */
#define Q31_BY_NUM_SAMPLES_640 ( 3360694 ) /* 1/639 in Q31 */
#define Q31_BY_NUM_SAMPLES_320 ( 6731924 ) /* 1/319 in Q31 */
#define Q31_BY_NUM_SAMPLES_160 ( 13506186 ) /* 1/159 in Q31 */
#define Q31_BY_SUB_FRAME_240 ( 8985287 ) /* 1/239 in Q31 */
#define Q31_BY_SUB_FRAME_180 ( 11997115 ) /* 1/179 in Q31 */
#define Q31_BY_SUB_FRAME_80 ( 27183337 ) /* 1/79 in Q31 */
#define Q31_BY_SUB_FRAME_40 ( 55063683 ) /* 1/39 in Q31 */
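The added comments make the intent explicit: each constant is a Q31 reciprocal of (N - 1), so per-sample slopes over N points can be computed with a fractional multiply instead of a division. Inside a function one might write (sketch; Mpy_32_32() is the usual BASOP Q31 x Q31 multiply and is assumed available):
/* Sketch: linear-interpolation step over the 40-sample subframe,
   computed without a division by multiplying with 1/39 in Q31. */
Word32 start_val, end_val, step; /* all in some common Qx */
step = Mpy_32_32( L_sub( end_val, start_val ), Q31_BY_SUB_FRAME_40 ); /* (end - start) / 39, Qx */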
/*----------------------------------------------------------------------------------*
......@@ -132,7 +134,7 @@
#define ONE_IN_Q29 536870912
#define ONE_IN_Q30 1073741824
#define ONE_IN_Q31 0x7fffffff
#define MINUS_ONE_IN_Q31 -2147483648
#define MINUS_ONE_IN_Q31 (-2147483647 - 1)
#define TWO_IN_Q29 1073741824
#define FOUR_IN_Q28 1073741824
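The new form of MINUS_ONE_IN_Q31 sidesteps a C pitfall: there are no negative integer literals, so -2147483648 is unary minus applied to 2147483648, a literal that does not fit in a 32-bit int and therefore takes a wider (or, under C90 rules, possibly unsigned) type before negation. (-2147483647 - 1) keeps every intermediate value inside int and yields INT32_MIN with the expected type:
/* Sketch: with 32-bit int, the literal 2147483648 is out of range, so
   -2147483648 is negated in a wider type; the subtraction form below
   never leaves int. */
Word32 min_q31 = ( -2147483647 - 1 ); /* INT32_MIN, no out-of-range literal */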
......@@ -225,6 +227,7 @@ enum{
#define ONE_BY_L_FRAME32k_Q31 3355443
#define ONE_BY_L_FRAME25_6k_Q31 4194304
#define ONE_BY_L_FRAME16k_Q31 6710886
#define ONE_BY_L_FRAME16k_Q15 102
#define ONE_BY_L_FRAME_Q31 8388608
#define ONE_BY_240_Q31 8947849
#define ONE_BY_L_FRAME8k_Q31 13421772
......@@ -533,15 +536,7 @@ enum
IND_STEREO_ICBWE_MSFLAG,
IND_SHB_ENER_SF,
IND_SHB_RES_GS,
#ifndef FIX_1486_IND_SHB_RES
IND_SHB_RES_GS1,
IND_SHB_RES_GS2,
IND_SHB_RES_GS3,
IND_SHB_RES_GS4,
IND_SHB_VF,
#else
IND_SHB_VF = IND_SHB_RES_GS + 5,
#endif
IND_SHB_LSF,
IND_SHB_MIRROR = IND_SHB_LSF + 5,
IND_SHB_GRID,
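With FIX_1486_IND_SHB_RES the four reserved names IND_SHB_RES_GS1..4 disappear, and IND_SHB_VF = IND_SHB_RES_GS + 5 pins the numbering so the five slots after IND_SHB_RES_GS stay reserved — the same pattern as IND_SHB_MIRROR = IND_SHB_LSF + 5 below. A minimal sketch of the pattern (names are illustrative):
/* Sketch: an explicit enumerator value reserves a block of indices
   without naming each placeholder. */
enum
{
    RES_GS,          /* base index; four more slots follow implicitly */
    VF = RES_GS + 5, /* first enumerator after the reserved block */
    LSF              /* == RES_GS + 6 */
};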
......@@ -688,6 +683,7 @@ enum
#define NUM_DCT_LENGTH 24
#define NB_DIV 2 /* number of division (subframes) per 20ms frame */
#define NB_DIV_LOG2 1 /* To be used for shift operation instead of division */
#define L_MDCT_HALF_OVLP_MAX ( L_MDCT_OVLP_MAX - 48000 / 200 ) /* Size of HALF overlap window slope @ 48 kHz */
#define L_MDCT_MIN_OVLP_MAX 60 /* Size of the MDCT minimal overlap @ 48 kHz - 1.25ms */
#define L_MDCT_TRANS_OVLP_MAX NS2SA( 48000, ACELP_TCX_TRANS_NS ) /* Size of the ACELP->MDCT transition overlap - 1.25ms */
......@@ -716,6 +712,7 @@ enum
#define L_FRAME 256 /* frame size at 12.8kHz */
#define NB_SUBFR 4 /* number of subframes per frame */
#define L_SUBFR ( L_FRAME / NB_SUBFR ) /* subframe size */
#define L_SUBFR_LOG2 6 /* To be used for shift operation instead of division */
#define L_SUBFR_Q6 ((L_FRAME/NB_SUBFR)*64) /* subframe size */
#define L_SUBFR_Q16 ((L_FRAME/NB_SUBFR)*65536) /* subframe size */
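L_SUBFR_LOG2 (like the other *_LOG2 constants added in this commit) lets hot loops replace a division by a power-of-two length with a shift, the cheap operation in the BASOP weight model. For non-negative counts one might write (sketch; shr() is the standard BASOP right shift):
/* Sketch: n_samples / L_SUBFR replaced by a shift, valid because
   L_SUBFR = 64 is a power of two and n_samples is non-negative. */
Word16 n_samples = L_FRAME;                      /* example input */
Word16 n_subfr = shr( n_samples, L_SUBFR_LOG2 ); /* == n_samples / L_SUBFR */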
......@@ -761,6 +758,7 @@ enum
#define PIT_FIR_SIZE6_2 ( PIT_UP_SAMP6 * PIT_L_INTERPOL6_2 + 1 )
#define E_MIN 0.0035f /* minimum allowable energy */
#define E_MIN_Q11_FX 7 /* minimum allowable energy in Q11*/
#define E_MIN_Q27_FX 469762 /* minimum allowable energy in Q27*/
#define STEP_DELTA 0.0625f /* quantization step for tilt compensation of gaussian cb. excitation */
#define GAMMA_EV 0.92f /* weighting factor for core synthesis error weighting */
#define FORMANT_SHARPENING_NOISE_THRESHOLD 21.0f /* lp_noise level above which formant sharpening is deactivated */
......@@ -795,6 +793,7 @@ enum
#endif
#define CLDFB_NO_CHANNELS_MAX_FX 30720 /*Q9*/
#define CLDFB_NO_COL_MAX 16 /* CLDFB resampling - max number of CLDFB col., == IVAS_CLDFB_NO_COL_MAX */
#define CLDFB_NO_COL_MAX_LOG2 4 /* To be used for shift operation instead of division */
#define ONE_BY_CLDFB_NO_COL_MAX_Q31 134217728
#define CLDFB_NO_COL_MAX_SWITCH 6 /* CLDFB resampling - max number of CLDFB col. for switching */
#define CLDFB_NO_COL_MAX_SWITCH_BFI 10 /* CLDFB resampling - max number of CLDFB col. for switching, BFI */
......@@ -1541,6 +1540,7 @@ enum
#define INTERP_3_2_MEM_LEN 15
#define L_SHB_LAHEAD 20 /* Size of lookahead for SHB */
#define NUM_SHB_SUBFR 16
#define NUM_SHB_SUBFR_LOG2 4 /* To be used for shift operation instead of division */
#define LPC_SHB_ORDER 10
#define LPC_WHTN_ORDER 4 /* Order of whitening filter for SHB excitation */
#define SHB_OVERLAP_LEN ( L_FRAME16k - L_SHB_LAHEAD ) / ( NUM_SHB_SUBFR - 1 )
......@@ -3103,6 +3103,7 @@ extern const Word16 Idx2Freq_Tbl[];
#define FS_16K_IN_NS_Q31 34360
#define ONE_BY_THREE_Q15 10923 /* 1/3.f in Q15 */
#define ONE_BY_TEN_Q15 3277 /* 1/10.f in Q15 */
#define THREE_Q21 6291456
#define SIX_Q21 12582912
......
......@@ -182,9 +182,7 @@ typedef struct TDREND_HRFILT_FiltSet_struct *IVAS_DEC_HRTF_HANDLE;
typedef struct ivas_hrtfs_crend_structure *IVAS_DEC_HRTF_CREND_HANDLE;
typedef struct ivas_hrtfs_fastconv_struct *IVAS_DEC_HRTF_FASTCONV_HANDLE;
typedef struct ivas_hrtfs_parambin_struct *IVAS_DEC_HRTF_PARAMBIN_HANDLE;
#ifdef NONBE_FIX_922_PRECOMPUTED_HRTF_PROPERTIES
typedef struct ivas_hrtfs_statistics_struct *IVAS_DEC_HRTF_STATISTICS_HANDLE;
#endif
typedef struct cldfb_filter_bank_struct *IVAS_CLDFB_FILTER_BANK_HANDLE;
typedef struct ivas_LS_setup_custom *IVAS_LSSETUP_CUSTOM_HANDLE;
......
......@@ -197,7 +197,11 @@ void E_UTIL_deemph2( Word16 shift, Word16 *x, const Word16 mu, const Word16 L, W
{
FOR( i = 0; i < L; i++ )
{
#ifdef ISSUE_1772_replace_shr_o
L_tmp = L_msu_o( Mpy_32_16_1( L_tmp, mu ), shr_sat( x[i], shift ), -32768 /*1.0f in Q15*/, &Overflow ); /*Qx-shift+16*/
#else
L_tmp = L_msu_o( Mpy_32_16_1( L_tmp, mu ), shr_o( x[i], shift, &Overflow ), -32768 /*1.0f in Q15*/, &Overflow ); /*Qx-shift+16*/
#endif
x[i] = round_fx_o( L_tmp, &Overflow ); /*Qx-shift*/
move16();
}
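Both branches of the #ifdef compute the same first-order de-emphasis recursion y[n] = x[n] * 2^-shift + mu * y[n-1]; the change only swaps the overflow-flagging shift for a saturating one. The floating-point equivalent of the loop is simply (sketch only):
/* Sketch: floating-point view of the de-emphasis loop above, ignoring
   the 2^-shift input scaling. */
static void deemph_sketch( float *x, const float mu, const int L, float *mem )
{
    int i;
    x[0] = x[0] + mu * ( *mem );
    for ( i = 1; i < L; i++ )
    {
        x[i] = x[i] + mu * x[i - 1];
    }
    *mem = x[L - 1]; /* carry the filter state to the next block */
}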
......
......@@ -299,10 +299,11 @@ void enhancer_fx(
/* RETURN ARGUMENTS : */
/* _ None */
/*======================================================================================*/
void enhancer_ivas_fx(
const Word16 codec_mode, /* i : flag indicating Codec Mode */
const Word32 core_brate, /* i : decoder bitrate */
const Word16 cbk_index, /* i : */
const Word16 cbk_index,
const Word16 Opt_AMR_WB, /* i : flag indicating AMR-WB IO mode */
const Word16 coder_type, /* i : coder type */
const Word16 i_subfr, /* i : subframe number */
......@@ -322,9 +323,11 @@ void enhancer_ivas_fx(
Word16 tmp, fac, *pt_exc2;
Word16 i;
Word32 L_tmp;
Word32 L_tmp1, L_tmp2;
Word16 gain_code_hi;
Word16 pit_sharp, tmp16;
Word16 excp[L_SUBFR], sc;
Word64 w_temp;
pit_sharp = gain_pit;
......@@ -400,215 +403,6 @@ void enhancer_ivas_fx(
phase_dispersion_fx( norm_gain_code, gain_pit, code, i, dm_fx );
/*------------------------------------------------------------
* noise enhancer
*
* - Enhance excitation on noise. (modify gain of code)
* If signal is noisy and LPC filter is stable, move gain
* of code 1.5 dB toward gain of code threshold.
* This decreases by 3 dB noise energy variation.
*-----------------------------------------------------------*/
/* tmp = 0.5f * (1.0f - voice_fac) */
tmp = msu_r_sat( 0x40000000 /*0.5.Q31*/, voice_fac, 16384 /*0.5.Q14*/ ); /*Q15 */ /* 1=unvoiced, 0=voiced */
/* fac = stab_fac * tmp */
fac = mult( stab_fac, tmp ); /*Q15*/
IF( LT_32( norm_gain_code, *gc_threshold ) )
{
L_tmp = Madd_32_16( norm_gain_code, norm_gain_code, 6226 /*0.19.Q15*/ ); /*Q16 */
L_tmp = L_min( L_tmp, *gc_threshold ); /*Q16 */
}
ELSE
{
L_tmp = Mult_32_16( norm_gain_code, 27536 /*0.84.Q15*/ ); /*Q16 */
L_tmp = L_max( L_tmp, *gc_threshold ); /*Q16 */
}
*gc_threshold = L_tmp;
move32(); /*Q16 */
/* gain_code = (fac * tmp) + (1.0 - fac) * gain_code ==> fac * (tmp - gain_code) + gain_code */
L_tmp = L_sub( L_tmp, norm_gain_code ); /*Q16 */
norm_gain_code = Madd_32_16( norm_gain_code, L_tmp, fac ); /*Q16 */
/* gain_code *= gain_inov - Inverse the normalization */
L_tmp = Mult_32_16( norm_gain_code, gain_inov ); /*Q13*/ /* gain_inov in Q12 */
sc = 6;
move16();
gain_code_hi = round_fx( L_shl( L_tmp, add( Q_exc, 3 ) ) ); /* in Q_exc */
/*------------------------------------------------------------*
* pitch enhancer
*
* - Enhance excitation on voiced. (HP filtering of code)
* On voiced signal, filtering of code by a smooth fir HP
* filter to decrease energy of code at low frequency.
*------------------------------------------------------------*/
test();
IF( !Opt_AMR_WB && EQ_16( coder_type, UNVOICED ) )
{
/* Copy(code, exc2, L_SUBFR) */
FOR( i = 0; i < L_SUBFR; i++ )
{
pt_exc2[i] = round_fx( L_shl( L_mult( gain_code_hi, code[i] ), sc ) ); /*Q0 */ /* code in Q12 (Q9 for encoder) */
move16();
}
}
ELSE
{
test();
test();
IF( Opt_AMR_WB && ( EQ_32( core_brate, ACELP_8k85 ) || EQ_32( core_brate, ACELP_6k60 ) ) )
{
pit_sharp = shl_sat( gain_pit, 1 ); /* saturation can occur here Q14 -> Q15 */
/* saturation takes care of "if (pit_sharp > 1.0) { pit_sharp=1.0; }" */
IF( GT_16( pit_sharp, 16384 /*0.5.Q15*/ ) )
{
tmp16 = mult( pit_sharp, 8192 /*0.25.Q15*/ );
FOR( i = 0; i < L_SUBFR; i++ )
{
/* excp[i] = pt_exc2[i] * pit_sharp * 0.25 */
excp[i] = mult_r( pt_exc2[i], tmp16 );
move16();
}
}
}
IF( EQ_16( L_frame, L_FRAME16k ) )
{
/* tmp = 0.150 * (1.0 + voice_fac) */
/* 0.30=voiced, 0=unvoiced */
tmp = mac_r( 0x13333333L /*0.150.Q31*/, voice_fac, 4915 /*0.150.Q15*/ ); /*Q15 */
}
ELSE
{
/* tmp = 0.125 * (1.0 + voice_fac) */
/* 0.25=voiced, 0=unvoiced */
tmp = mac_r( 0x10000000L /*0.125.Q31*/, voice_fac, 4096 ); /*Q15 */
}
/*-----------------------------------------------------------------
* Do a simple noncausal "sharpening": effectively an FIR
* filter with coefs [-tmp 1.0 -tmp] where tmp=0...0.25.
* This is applied to code and added to exc2
*-----------------------------------------------------------------*/
/* pt_exc2[0] += code[0] - tmp * code[1] */
L_tmp = L_deposit_h( code[0] ); /* if Enc :Q9 * Q15 -> Q25 */
L_tmp = L_msu( L_tmp, code[1], tmp ); /* Q12 * Q15 -> Q28 */
L_tmp = L_shl( L_mult( gain_code_hi, extract_h( L_tmp ) ), sc );
pt_exc2[0] = msu_r( L_tmp, -32768, pt_exc2[0] );
move16(); /* in Q_exc */
FOR( i = 1; i < L_SUBFR - 1; i++ )
{
/* pt_exc2[i] += code[i] - tmp * code[i-1] - tmp * code[i+1] */
L_tmp = L_msu( -32768, code[i], -32768 );
L_tmp = L_msu( L_tmp, code[i + 1], tmp );
tmp16 = msu_r( L_tmp, code[i - 1], tmp );
L_tmp = L_shl( L_mult( gain_code_hi, tmp16 ), sc );
pt_exc2[i] = msu_r_sat( L_tmp, -32768, pt_exc2[i] );
move16(); /* in Q_exc */
}
/* pt_exc2[L_SUBFR-1] += code[L_SUBFR-1] - tmp * code[L_SUBFR-2] */
L_tmp = L_deposit_h( code[L_SUBFR - 1] ); /*Q28 */
L_tmp = L_msu( L_tmp, code[L_SUBFR - 2], tmp ); /*Q28 */
L_tmp = L_shl( L_mult( gain_code_hi, extract_h( L_tmp ) ), sc );
pt_exc2[L_SUBFR - 1] = msu_r( L_tmp, -32768, pt_exc2[L_SUBFR - 1] );
move16(); /* in Q_exc */
test();
test();
IF( Opt_AMR_WB && ( EQ_32( core_brate, ACELP_8k85 ) || EQ_32( core_brate, ACELP_6k60 ) ) )
{
IF( GT_16( pit_sharp, 16384 /*0.5.Q15*/ ) )
{
FOR( i = 0; i < L_SUBFR; i++ )
{
/* excp[i] += pt_exc2[i] */
excp[i] = add( excp[i], pt_exc2[i] );
move16();
}
agc2_fx( pt_exc2, excp, L_SUBFR );
Copy( excp, pt_exc2, L_SUBFR );
}
}
}
}
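For orientation, the noise-enhancer block above smooths the innovation gain toward a running threshold: the threshold moves up by a factor 1.19 (about +1.5 dB) when the gain lies below it and down by 0.84 (about -1.5 dB) otherwise, and the gain is then pulled toward the clipped threshold by fac = stab_fac * 0.5 * (1 - voice_fac). A floating-point sketch of the same update (illustrative only):
/* Sketch only: floating-point view of the gc_threshold update and gain
   smoothing performed in the noise enhancer above. */
static float noise_enhancer_gain( float gain, float *gc_threshold,
                                  float stab_fac, float voice_fac )
{
    float thr, fac;
    if ( gain < *gc_threshold )
    {
        thr = gain * 1.19f; /* +1.5 dB, capped at the threshold */
        if ( thr > *gc_threshold ) thr = *gc_threshold;
    }
    else
    {
        thr = gain * 0.84f; /* -1.5 dB, floored at the threshold */
        if ( thr < *gc_threshold ) thr = *gc_threshold;
    }
    *gc_threshold = thr;
    fac = stab_fac * 0.5f * ( 1.0f - voice_fac ); /* 1 = unvoiced, 0 = voiced */
    return gain + fac * ( thr - gain );           /* move gain toward threshold */
}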
void enhancer_ivas_fx2(
const Word32 core_brate, /* i : decoder bitrate */
const Word16 Opt_AMR_WB, /* i : flag indicating AMR-WB IO mode */
const Word16 coder_type, /* i : coder type */
const Word16 i_subfr, /* i : subframe number */
const Word16 L_frame, /* i : frame size */
const Word16 voice_fac, /* i : subframe voicing estimation Q15 */
const Word16 stab_fac, /* i : LP filter stablility measure Q15 */
Word32 norm_gain_code, /* i : normalised innovative cb. gain Q16 */
const Word16 gain_inov, /* i : gain of the unscaled innovation Q12 */
Word32 *gc_threshold, /* i/o: gain code threshold Q16 */
Word16 *code, /* i/o: innovation Q12 */
Word16 *exc2, /* i/o: adapt. excitation/total exc. Q_exc*/
const Word16 gain_pit, /* i : quantized pitch gain Q14 */
struct dispMem_fx *dm_fx, /* i/o: phase dispersion algorithm memory */
const Word16 Q_exc /* i : Q of the excitation */
)
{
Word16 tmp, fac, *pt_exc2;
Word16 i;
Word32 L_tmp;
Word32 L_tmp1, L_tmp2;
Word16 gain_code_hi;
Word16 pit_sharp, tmp16;
Word16 excp[L_SUBFR], sc;
Word64 w_temp;
pit_sharp = gain_pit;
move16(); /* to remove gcc warning */
pt_exc2 = exc2 + i_subfr;
/*------------------------------------------------------------*
* Phase dispersion to enhance noise at low bit rate
*------------------------------------------------------------*/
i = 2;
move16(); /* no dispersion */
IF( Opt_AMR_WB )
{
IF( LE_32( core_brate, ACELP_6k60 ) )
{
i = 0;
move16(); /* high dispersion */
}
ELSE IF( LE_32( core_brate, ACELP_8k85 ) )
{
i = 1;
move16(); /* low dispersion */
}
}
ELSE IF( NE_16( coder_type, UNVOICED ) )
{
test();
test();
test();
test();
IF( LE_32( core_brate, ACELP_7k20 ) )
{
i = 0;
move16(); /* high dispersion */
}
ELSE IF( ( EQ_16( coder_type, GENERIC ) || EQ_16( coder_type, TRANSITION ) || EQ_16( coder_type, AUDIO ) || coder_type == INACTIVE ) && LE_32( core_brate, ACELP_9k60 ) )
{
i = 1;
move16(); /* low dispersion */
}
}
phase_dispersion_fx( norm_gain_code, gain_pit, code, i, dm_fx );
/*------------------------------------------------------------
* noise enhancer
*
......@@ -656,7 +450,8 @@ void enhancer_ivas_fx2(
* filter to decrease energy of code at low frequency.
*------------------------------------------------------------*/
test();
IF( !Opt_AMR_WB && EQ_16( coder_type, UNVOICED ) )
test();
IF( !Opt_AMR_WB && EQ_16( codec_mode, MODE1 ) && EQ_16( coder_type, UNVOICED ) )
{
/* Copy(code, exc2, L_SUBFR) */
FOR( i = 0; i < L_SUBFR; i++ )
......