From 14e3697e827be3335fd55b4a814ea5ac71ffad7e Mon Sep 17 00:00:00 2001 From: Sandesh Venkatesh Date: Thu, 5 Jun 2025 08:43:10 +0530 Subject: [PATCH 1/7] Fix for 3GPP issue 1020: ParamISM: Binaural: HF click Link #1020 --- lib_dec/acelp_core_dec_fx.c | 12 ++++++++-- lib_dec/stat_dec.h | 3 ++- lib_dec/swb_tbe_dec_fx.c | 46 ++++++++++++++++++++++--------------- lib_dec/updt_dec_fx.c | 2 ++ 4 files changed, 41 insertions(+), 22 deletions(-) diff --git a/lib_dec/acelp_core_dec_fx.c b/lib_dec/acelp_core_dec_fx.c index fd0c8ce2d..37d284346 100644 --- a/lib_dec/acelp_core_dec_fx.c +++ b/lib_dec/acelp_core_dec_fx.c @@ -2326,6 +2326,11 @@ ivas_error acelp_core_dec_fx( st->hBWE_TD->bwe_non_lin_prev_scale_fx = 0; move32(); set16_fx( st->hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); + if ( NE_16( st->element_mode, EVS_MONO ) ) + { + st->hBWE_TD->q_old_bwe_exc_extended_fx = 0; + move16(); + } } test(); @@ -2341,9 +2346,12 @@ ivas_error acelp_core_dec_fx( } ELSE { - Copy_Scale_sig_16_32_no_sat( st->hBWE_TD->old_bwe_exc_extended_fx, bwe_exc_extended_fx, NL_BUFF_OFFSET, ( sub( shl( st->Q_exc, 1 ), sub( st->prev_Q_bwe_exc, 16 ) ) ) ); // prev_Q_bwe_exc + Copy_Scale_sig_16_32_no_sat( st->hBWE_TD->old_bwe_exc_extended_fx, bwe_exc_extended_fx, NL_BUFF_OFFSET, ( sub( shl( st->Q_exc, 1 ), st->hBWE_TD->q_old_bwe_exc_extended_fx ) ) ); /* Q(q_old_bwe_exc_extended_fx) -> Q(2 * Q_exc) */ non_linearity_ivas_fx( bwe_exc_fx, bwe_exc_extended_fx + NL_BUFF_OFFSET, L_FRAME32k, &st->hBWE_TD->bwe_non_lin_prev_scale_fx, st->Q_exc, st->coder_type, voice_factors_fx, st->L_frame ); - Copy_Scale_sig_32_16( bwe_exc_extended_fx + L_FRAME32k, st->hBWE_TD->old_bwe_exc_extended_fx, NL_BUFF_OFFSET, negate( sub( shl( st->Q_exc, 1 ), sub( st->prev_Q_bwe_exc, 16 ) ) ) ); // prev_Q_bwe_exc + exp = sub( L_norm_arr( bwe_exc_extended_fx + L_FRAME32k, NL_BUFF_OFFSET ), 16 ); + Copy_Scale_sig_32_16( bwe_exc_extended_fx + L_FRAME32k, st->hBWE_TD->old_bwe_exc_extended_fx, NL_BUFF_OFFSET, exp ); /* Q(2 * Q_exc) -> Q(q_old_bwe_exc_extended_fx) */ + st->hBWE_TD->q_old_bwe_exc_extended_fx = add( shl( st->Q_exc, 1 ), exp ); + move16(); } } diff --git a/lib_dec/stat_dec.h b/lib_dec/stat_dec.h index 4e4e33c36..28a97f072 100644 --- a/lib_dec/stat_dec.h +++ b/lib_dec/stat_dec.h @@ -822,7 +822,8 @@ typedef struct td_bwe_dec_structure Word16 old_bwe_exc_fx[PIT16k_MAX * 2]; /*Q_exc*/ Word16 bwe_seed[2]; Word32 bwe_non_lin_prev_scale_fx; - Word16 old_bwe_exc_extended_fx[NL_BUFF_OFFSET]; + Word16 old_bwe_exc_extended_fx[NL_BUFF_OFFSET]; /* EVS : Q(prev_Q_bwe_exc - 16), IVAS : Q(q_old_bwe_exc_extended_fx) */ + Word16 q_old_bwe_exc_extended_fx; Word32 genSHBsynth_Hilbert_Mem_fx[HILBERT_MEM_SIZE]; diff --git a/lib_dec/swb_tbe_dec_fx.c b/lib_dec/swb_tbe_dec_fx.c index bd8700063..b005a88bf 100644 --- a/lib_dec/swb_tbe_dec_fx.c +++ b/lib_dec/swb_tbe_dec_fx.c @@ -56,10 +56,13 @@ static void find_max_mem_dec( hBWE_TD = st_fx->hBWE_TD; /* old BWE exc max */ - FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + IF( EQ_16( st_fx->element_mode, EVS_MONO ) ) { - tempQ15 = abs_s( hBWE_TD->old_bwe_exc_extended_fx[i] ); - max = s_max( max, tempQ15 ); + FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + { + tempQ15 = abs_s( hBWE_TD->old_bwe_exc_extended_fx[i] ); + max = s_max( max, tempQ15 ); + } } /* decimate all-pass steep memory */ @@ -232,10 +235,13 @@ static void rescale_genSHB_mem_dec( TD_BWE_DEC_HANDLE hBWE_TD; hBWE_TD = st_fx->hBWE_TD; - FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + IF( EQ_16( st_fx->element_mode, EVS_MONO ) ) { - hBWE_TD->old_bwe_exc_extended_fx[i] = shl( 
hBWE_TD->old_bwe_exc_extended_fx[i], sf ); - move16(); + FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + { + hBWE_TD->old_bwe_exc_extended_fx[i] = shl( hBWE_TD->old_bwe_exc_extended_fx[i], sf ); + move16(); + } } FOR( i = 0; i < 7; i++ ) @@ -283,9 +289,12 @@ void find_max_mem_wb( Decoder_State *st_fx, Word16 *n_mem ) TD_BWE_DEC_HANDLE hBWE_TD; hBWE_TD = st_fx->hBWE_TD; - FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + IF( EQ_16( st_fx->element_mode, EVS_MONO ) ) { - max = s_max( max, abs_s( hBWE_TD->old_bwe_exc_extended_fx[i] ) ); + FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + { + max = s_max( max, abs_s( hBWE_TD->old_bwe_exc_extended_fx[i] ) ); + } } FOR( i = 0; i < 7; i++ ) { @@ -362,10 +371,13 @@ void rescale_genWB_mem( Decoder_State *st_fx, Word16 sf ) TD_BWE_DEC_HANDLE hBWE_TD; hBWE_TD = st_fx->hBWE_TD; - FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + IF( EQ_16( st_fx->element_mode, EVS_MONO ) ) { - hBWE_TD->old_bwe_exc_extended_fx[i] = shl( hBWE_TD->old_bwe_exc_extended_fx[i], sf ); - move16(); + FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) + { + hBWE_TD->old_bwe_exc_extended_fx[i] = shl( hBWE_TD->old_bwe_exc_extended_fx[i], sf ); + move16(); + } } FOR( i = 0; i < 10; i++ ) @@ -418,6 +430,8 @@ static void InitSWBdecBuffer_fx( move16(); set16_fx( hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); + hBWE_TD->q_old_bwe_exc_extended_fx = 0; + move16(); hBWE_TD->bwe_non_lin_prev_scale_fx = 0; move16(); @@ -5375,12 +5389,6 @@ static void rescale_genSHB_mem_dec_ivas( TD_BWE_DEC_HANDLE hBWE_TD; hBWE_TD = st_fx->hBWE_TD; - FOR( i = 0; i < NL_BUFF_OFFSET; i++ ) - { - hBWE_TD->old_bwe_exc_extended_fx[i] = shl( hBWE_TD->old_bwe_exc_extended_fx[i], sf ); - move16(); - } - FOR( i = 0; i < 7; i++ ) { hBWE_TD->mem_genSHBexc_filt_down_shb_fx[i] = shl( hBWE_TD->mem_genSHBexc_filt_down_shb_fx[i], sf ); @@ -6147,7 +6155,7 @@ void ivas_swb_tbe_dec_fx( /* ----- calculate optimum Q_bwe_exc and rescale memories accordingly ----- */ Lmax = 0; move32(); - FOR( cnt = 0; cnt < L_FRAME32k; cnt++ ) + FOR( cnt = 0; cnt < L_FRAME32k + NL_BUFF_OFFSET; cnt++ ) { Lmax = L_max( Lmax, L_abs( bwe_exc_extended_fx[cnt] ) ); } @@ -6177,7 +6185,7 @@ void ivas_swb_tbe_dec_fx( /* rescale the bwe_exc_extended and bring it to 16-bit single precision with dynamic norm */ sc = sub( Q_bwe_exc, add( Q_exc, Q_exc ) ); - FOR( cnt = 0; cnt < L_FRAME32k; cnt++ ) + FOR( cnt = 0; cnt < L_FRAME32k + NL_BUFF_OFFSET; cnt++ ) { bwe_exc_extended_16[cnt] = round_fx_sat( L_shl_sat( bwe_exc_extended_fx[cnt], sc ) ); move16(); diff --git a/lib_dec/updt_dec_fx.c b/lib_dec/updt_dec_fx.c index 81046fb19..7af4dec1a 100644 --- a/lib_dec/updt_dec_fx.c +++ b/lib_dec/updt_dec_fx.c @@ -240,6 +240,8 @@ void updt_IO_switch_dec_fx( { set16_fx( hBWE_TD->old_bwe_exc_fx, 0, PIT16k_MAX * 2 ); set16_fx( hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); + hBWE_TD->q_old_bwe_exc_extended_fx = 0; + move16(); hBWE_TD->bwe_non_lin_prev_scale_fx = 0; move16(); -- GitLab From b7cb47dc09e7ce88dc1f2552bbb09dd88076c748 Mon Sep 17 00:00:00 2001 From: Sandesh Venkatesh Date: Thu, 5 Jun 2025 12:47:31 +0530 Subject: [PATCH 2/7] Review comments addressing --- lib_dec/acelp_core_dec_fx.c | 2 +- lib_dec/swb_tbe_dec_fx.c | 2 +- lib_dec/updt_dec_fx.c | 2 -- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib_dec/acelp_core_dec_fx.c b/lib_dec/acelp_core_dec_fx.c index 37d284346..847e16594 100644 --- a/lib_dec/acelp_core_dec_fx.c +++ b/lib_dec/acelp_core_dec_fx.c @@ -2328,7 +2328,7 @@ ivas_error acelp_core_dec_fx( set16_fx( st->hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); if ( NE_16( 
st->element_mode, EVS_MONO ) ) { - st->hBWE_TD->q_old_bwe_exc_extended_fx = 0; + st->hBWE_TD->q_old_bwe_exc_extended_fx = Q15; move16(); } } diff --git a/lib_dec/swb_tbe_dec_fx.c b/lib_dec/swb_tbe_dec_fx.c index b005a88bf..1234e2853 100644 --- a/lib_dec/swb_tbe_dec_fx.c +++ b/lib_dec/swb_tbe_dec_fx.c @@ -430,7 +430,7 @@ static void InitSWBdecBuffer_fx( move16(); set16_fx( hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); - hBWE_TD->q_old_bwe_exc_extended_fx = 0; + hBWE_TD->q_old_bwe_exc_extended_fx = Q15; move16(); hBWE_TD->bwe_non_lin_prev_scale_fx = 0; move16(); diff --git a/lib_dec/updt_dec_fx.c b/lib_dec/updt_dec_fx.c index 7af4dec1a..81046fb19 100644 --- a/lib_dec/updt_dec_fx.c +++ b/lib_dec/updt_dec_fx.c @@ -240,8 +240,6 @@ void updt_IO_switch_dec_fx( { set16_fx( hBWE_TD->old_bwe_exc_fx, 0, PIT16k_MAX * 2 ); set16_fx( hBWE_TD->old_bwe_exc_extended_fx, 0, NL_BUFF_OFFSET ); - hBWE_TD->q_old_bwe_exc_extended_fx = 0; - move16(); hBWE_TD->bwe_non_lin_prev_scale_fx = 0; move16(); -- GitLab From df73a2db76f10d0b32d8031be582b252ca089df3 Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Fri, 6 Jun 2025 14:49:03 +0200 Subject: [PATCH 3/7] deactivate jobs for faster testing --- .gitlab-ci.yml | 30 ++-- README.md | 435 ++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 445 insertions(+), 20 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9f37f50f2..15dad4ea3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -838,7 +838,7 @@ build-codec-linux-make: - make -j # ensure that codec builds on linux with instrumentation active -build-codec-linux-instrumented-make: +.build-codec-linux-instrumented-make: rules: - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -855,7 +855,7 @@ build-codec-linux-instrumented-make: - bash scripts/prepare_instrumentation.sh -m MEM_ONLY - make -j -C $INSTR_DIR -build-codec-linux-debugging-make: +.build-codec-linux-debugging-make: rules: - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -873,7 +873,7 @@ build-codec-linux-debugging-make: - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/activate-debug-mode-info-if-set.sh - make -j -build-codec-windows-msbuild: +.build-codec-windows-msbuild: rules: - if: $CI_PIPELINE_SOURCE == 'web' - if: $CI_PIPELINE_SOURCE == 'push' && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH @@ -896,7 +896,7 @@ build-codec-windows-msbuild: # --------------------------------------------------------------- ### jobs that check for bitexactness of fx encoder and decoder -check-be-to-target-short-enc-0db: +.check-be-to-target-short-enc-0db: extends: - .check-be-to-target-job variables: @@ -907,7 +907,7 @@ check-be-to-target-short-enc-0db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -check-be-to-target-short-enc-+10db: +.check-be-to-target-short-enc-+10db: extends: - .check-be-to-target-job variables: @@ -918,7 +918,7 @@ check-be-to-target-short-enc-+10db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -check-be-to-target-short-enc--10db: +.check-be-to-target-short-enc--10db: extends: - .check-be-to-target-job variables: @@ -940,7 +940,7 @@ check-be-to-target-short-dec-0db: DUT_ENCODER_PATH: "$REF_ENCODER_PATH" MERGE_TARGET_ENCODER_PATH: "$REF_ENCODER_PATH" -check-be-to-target-short-dec-+10db: +.check-be-to-target-short-dec-+10db: extends: - .check-be-to-target-job variables: @@ -951,7 +951,7 @@ 
check-be-to-target-short-dec-+10db: DUT_ENCODER_PATH: "$REF_ENCODER_PATH" MERGE_TARGET_ENCODER_PATH: "$REF_ENCODER_PATH" -check-be-to-target-short-dec--10db: +.check-be-to-target-short-dec--10db: extends: - .check-be-to-target-job variables: @@ -963,7 +963,7 @@ check-be-to-target-short-dec--10db: MERGE_TARGET_ENCODER_PATH: "$REF_ENCODER_PATH" ### jobs that check for regressions on non-BE testcases -check-regressions-short-enc-0db: +.check-regressions-short-enc-0db: extends: - .check-regressions-pytest-job needs: @@ -977,7 +977,7 @@ check-regressions-short-enc-0db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -check-regressions-short-enc-+10db: +.check-regressions-short-enc-+10db: extends: - .check-regressions-pytest-job needs: @@ -991,7 +991,7 @@ check-regressions-short-enc-+10db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -check-regressions-short-enc--10db: +.check-regressions-short-enc--10db: extends: - .check-regressions-pytest-job needs: @@ -1005,7 +1005,7 @@ check-regressions-short-enc--10db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -check-regressions-short-dec-0db: +.check-regressions-short-dec-0db: extends: - .check-regressions-pytest-job needs: @@ -1019,7 +1019,7 @@ check-regressions-short-dec-0db: DUT_ENCODER_PATH: "$REF_ENCODER_PATH" MERGE_TARGET_ENCODER_PATH: "$REF_ENCODER_PATH" -check-regressions-short-dec-+10db: +.check-regressions-short-dec-+10db: extends: - .check-regressions-pytest-job needs: @@ -1033,7 +1033,7 @@ check-regressions-short-dec-+10db: DUT_ENCODER_PATH: "$REF_ENCODER_PATH" MERGE_TARGET_ENCODER_PATH: "$REF_ENCODER_PATH" -check-regressions-short-dec--10db: +.check-regressions-short-dec--10db: extends: - .check-regressions-pytest-job needs: @@ -1437,7 +1437,7 @@ coverage-test-on-main-scheduled: # --------------------------------------------------------------- # check bitexactness to EVS -be-2-evs-26444: +.be-2-evs-26444: extends: - .test-job-linux rules: diff --git a/README.md b/README.md index d07d3edab..3698ef45c 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,433 @@ -# IVAS BASOP - 3GPP SA4 WG + + +--- +title: Python scripts for Testing the IVAS code and Generating test items +--- +# Python scripts for Testing the IVAS code and Generating test items +## Contents +- [Python scripts for Testing the IVAS code and Generating test items](#python-scripts-for-testing-the-ivas-code-and-generating-test-items) + - [Contents](#contents) + - [0. Requirements](#0-requirements) + - [1. Scripts and classes for testing IVAS code](#1--scripts-and-classes-for-testing-ivas-code) + - [1.1 Classes](#11-classes) + - [1.2 Output directory structure](#12-output-directory-structure) + - [1.3 Scripts](#13-scripts) + - [Common commandline options for the scripts](#common-commandline-options-for-the-scripts) + - [`runIvasCodec.py`](#runivascodecpy) + - [`IvasBuildAndRun.py`](#ivasbuildandrunpy) + - [`IvasBuildAndRunChecks.py`](#ivasbuildandruncheckspy) + - [`testBitexact.py`](#testbitexactpy) + - [`self_test.py`](#self_testpy) + +--- + + +## 0. Requirements +- Python >= 3.7 +- numpy and scipy for `generate_test_items.py`, `testBitexact.py` and `self_test.py` +--- + +## 1. 
Scripts and classes for testing IVAS code + +### 1.1 Classes + +- IvasModeCollector.py + - IvasModeCollector: manages operating modes +- IvasModeRunner.py: + - IvasModeRunner(IvasModeCollector): run the encoder/decoder (multithreaded) +- IvasSvnBuilder.py: checks out and builds the source code + - IvasBuilder: build the source code + - IvasSvnBuilder(IvasBuilder): export and build the source code + - IvasBuilderAndRunner: (export and) build the code, run a number of specific configurations +- IvasModeAnalyzer.py: + - IvasModeAnalyzer(IvasModeRunner): parses log files, generates error info and creates complexity tables + +The classes rely on a set of configuration dictionaries; some basic dictionaries are available as .json files. + +One dictionary defines the paths for the necessary AFSP and util binaries and the default input directories for items, and is similar to the configuration files given by `-p` to `runEvsCodec.pl`. As examples, `FhG_linux.json`, `FhG_windows.json` and `FhG_macos.json` already exist. + +The other necessary dictionary contains the IVAS format and mode definitions. Also, two default dictionaries (`ivas_modes.json` and `ivas_modes_debug.json`) already exist. + +Examples of how to use these classes can be found in the Jupyter notebook `ivas_examples.ipynb` and in the scripts below. + +### 1.2 Output directory structure + +Given output directories have a certain structure to avoid too much clutter: + +| `outdir/` | | | | +|-----------|------------------|-----------------------------|--------------------------------------------------| +| | `pcm/` | PCM file cache | | +| | `enc/` | Encoded bitstreams | | +| | `dec/` | Decoded waveforms | | +| | `logs/` | Encoder/Decoder/PCM logs | one log per generated PCM/bitstream/decoded waveform | +| | `IVAS_cod[.exe]` | Encoder binary | Only if a build step was involved | +| | `IVAS_dec[.exe]` | Decoder binary | Only if a build step was involved | +| | `build.log` | Log of build messages | Only if a build step was involved | +| | `run.log` | Log of general run messages | | + +--- + +### 1.3 Scripts + +- runIvasCodec.py : replacement for runIvasCodec.pl +- IvasBuildAndRun.py : (export), build and run the IVAS framework +- IvasBuildAndRunChecks.py : (export), build and run various checks (CLANG, valgrind, complexity measures) +- testBitexact.py: (export), build and run two different versions of the IVAS codec and test for bitexactness +- self_test.py: run the self tests (replacement for the self_test binary) + +#### Common commandline options for the scripts + +The different scripts share a common set of command line options: +``` + -h, --help show this help message and exit + -z [{silent,console,progress,debug,info,warning,error,critical}], --loglevel [{silent,console,progress,debug,info,warning,error,critical}] + Either show with minimal output (default, 'silent'), or reroute log messages with levels higher than LEVEL to the console + -g LOGFILE, --logfile LOGFILE + log file + -t MAX_WORKERS, --max_workers MAX_WORKERS + use multithreading with MAX_WORKERS threads (default: number of CPUs available at the machine) + -C [FORMAT ...], --formats [FORMAT ...] + List of IVAS formats to run, default all (for possible choices get a list with -L) + -m [MODE ...], --modes [MODE ...] + List of IVAS modes to run, default all (for possible choices get a list with -l) + --oc [OC_DICT ...] 
List of output formats, either a space separated list or a json string in single quotes + -E "-opt1 opt2", --enc_options "-opt1 opt2" + Additional command line options for the encoder (always use it in the form -E="-o -opt ...") + -D "-opt1 opt2", --dec_options "-opt1 opt2" + Additional command line options for the decoder (always use it in the form -D="-o -opt ...") + --format_file FORMAT_FILE + File name for the IVAS ivas_format dictionary to use (default: ivas_modes_v2.json) + -I [ITEM ...], --items [ITEM ...] + List of items to be coded, allows for explicit definition of metadata files by grouping an item together with its metadata files in square brackets [ITEM,METADATAFILE,...] + --ism_metadata_files [ISM_MDFILE ...] + List of ISM metadata files + --masa_metadata_file MASA_MDFILE + MASA metadata file + -S SRIN, --srin SRIN Input sample rate for the encoder (either in Hz or kHz) + -R SROUT, --srout SROUT + Output sample rate for the decoder (either in Hz or kHz) + -p CONFIG, --config CONFIG + select site-related config as CONFIG.json + -l, --list_modes list all supported IVAS ivas_formats + -L, --list_formats list all supported IVAS formats + -U LIMIT_DURATION, --limit_duration LIMIT_DURATION + limit dUration by specifying start and end of input signal in seconds. Can be either a single float value (will be interpreted as length), or by giving as start: (will be interpreted as start), or by giving as start:end + -f FER_FILE, --fer_file FER_FILE + frame error pattern file + -y BER_FILE, --ber_file BER_FILE + bit error pattern file + -J JBM_FILE, --jbm_file JBM_FILE + jbm file + -i INDIR, --indir INDIR + Directory for items to be coded, either a single directory or a json string for different directories with the input formats as keys + --decoder_only only run the decoder + -x FILTER_REGEX, --filter FILTER_REGEX + Regex for filtering modes + -s, --sidstart Cut frames from the beginning of the encoded bit stream until the first SID frame + --bs_length BS_LENGTH + Cut bitstream to this (maximum) length. Is applied AFTER --sidstart processing, if this is given + --info Output debug info in subfolders of /res (use with caution, this can generate a huge amount of data) + --sofa SOFA Directory for the group B binaural renderer to look for SOFA files + -e ENC, --enc ENC Encoder binary name (default ./IVAS_cod) + -d DEC, --dec DEC Decoder binary name (default ./IVAS_dec) + --fail_log_dir FAIL_LOG_DIR + Move logs of failed modes to dir (default none) +``` + +Some notable differences exist compared to the similar command line options of `runEvsCodec.pl`, and a few new ones are added: + +`-i` is only intended for input directories. But `-i` is also more flexible than in `runEvsCodec.pl`. You can still give a single input directory which is then used as input directory for all possible input formats. But you can also give a json string if you want to give different input directories for different input formats. E.g. +``` +-i {\"MONO\":\"/path/to/mono/files\", \"STEREO\":\"/path/to/stereo/files\"} +``` +gives input paths for mono and stereo input files; all other input formats use the default paths from the given config. Don't forget to escape the double quotes. For backwards compatibility with `runEvsCodec.pl`, `-i` also accepts a single input file, but it is encouraged to use `-I` for this purpose. + +`-I, --items` is new and only for input files. You can give either absolute path names or just the basename; in the latter case the script searches the given input directories for the item(s). 
You can also give metadata files belonging to the item directly here by enclosing them together with the item in square brackets and separated by commas. So e.g.: +``` +--items item1.wav [item2.wav, metadata1foritem2.csv, metadata2foritem2.csv] +``` +tells the script to use `item1.wav` (with some default metadata files if needed) and `item2.wav` together with the metadata files `metadata1foritem2.csv` and `metadata2foritem2.csv` + +`--metadata_files` gives a list of metadata files to be used for all input items where no specific metadata files were given. E.g. +``` +--metadata_files commonmetadatafile1.csv commonmetadatafile2.csv +``` + +`--format_file` gives a json file with the format specification dictionary (what was hardcoded in `runEvsCodec.pl` as `$modes_whatever`); the default is `ivas_modes.json`, another one with some more subformats for debugging purposes is `ivas_modes_debug.json`. The format is described in the example notebook `ivas_examples.ipynb`, so for specific purposes it is easy to create a dedicated format dictionary. + +`-L, --list_formats` outputs a list of all defined formats in the given format dictionary; these can be used to restrict the formats with `-C` + +`-C, --formats` restricts which formats should be run, e.g. `-C MC ISM1` only runs the MC and ISM1 format (and all modes belonging to the format) + +`-l, --list_modes` outputs a list of available modes (already restricted if `-C` was given), pretty much like `-l` in `runEvsCodec.pl`, but the default modes in the default dictionary `ivas_modes.json` have some nicer names now. Mode names can be used with `-m, --modes` + +`-m, --modes` can now take a list of modes instead of just one + +`--oc` is new and is for defining the list of output formats for decoding, e.g. `--oc STEREO CICP6 BINAURAL`. There is also a very advanced possibility to give extra decoder command line options for each output format by again providing a json string (this time in single quotes!). If you are interested, look at the python code itself... + +`--decoder_only` runs only the decoding. Note that in this case the output directory structure and the encoded files have to exist already. + +`-x, --filter` also accepts regular expressions for filtering modes, so a simple `-x swb` will run all SWB modes, while e.g. `-x b(48|64|80).*dtx` will run all dtx modes with 48, 64, and 80 kbps. For Python regular expression syntax please see https://docs.python.org/3/library/re.html + +`-s, --sidstart` cuts all frames preceding the first SID frame in the encoded bit streams before decoding + +`--info` enables writing to individual sub directories of `/res` for the debug info output (works for `runIvasCodec.py` if DEBUG_MODE_INFO is active, activates DEBUG_MODE_INFO in the build step of `IvasBuildAndRun.py`, has no impact on `IvasBuildAndRunChecks.py` and `testBitexact.py`) + +--- + +#### `runIvasCodec.py` + +Additional arguments: +``` + -V, --valgrind Run with valgrind + -o OUTDIR, --outdir OUTDIR + Output directory for items to be coded, (default + ./out) + -e ENC, --enc ENC Encoder binary name + -d DEC, --dec DEC Decoder binary name +``` +Pretty much self-explanatory and in line with `runEvsCodec.pl`. + +Default binaries are `../../IVAS_cod` and `../../IVAS_dec` relative to the directory `runIvasCodec.py` sits in. 
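+ +A hypothetical invocation in the style of the examples further below could look like this (the config name `user_linux`, the format selection, the output formats and the output directory are placeholders, not required values): +``` +./runIvasCodec.py -p user_linux -C MC SBA --oc BINAURAL STEREO --srin 48 --srout 48 -o ./out +``` 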
+ +--- + +#### `IvasBuildAndRun.py` + +(Exports), builds and runs the IVAS codec. + +Additional arguments: +``` + --srcdir SRCDIR Existing source file directory or desired directory + for SVN export for the reference + --svn SVN Path to SVN repository for the reference + --r R SVN revision for the reference (default HEAD) + --svnuser SVNUSER SVN user name + --svnpass SVNPASS SVN password + --rebuild force a rebuild of the binaries +``` + +`--srcdir` is either an already existing source code directory or the target directory for the script to export the code from an svn repository into (in this case the code is actually exported to `srcdir/trunk_or_branch_name/rR`). This option is mandatory. + +`--svn` optionally gives the SVN URL to be exported. This URL can either be the complete svn URL or just the relative path to the standard svn URL for the IVAS project. So e.g. `--svn https://INSERT_SVN_REPO/trunk/` and `--svn trunk` would lead to the export of the trunk of the default IVAS project svn repo. + +`--r` The desired revision (if not given, the `HEAD` revision is checked out) + +`--svnuser` Username for the SVN. If omitted, the scripts try to identify the svn user from the saved svn credentials. If not found, the user is prompted for a user name. + +`--svnpass` SVN password for the given svn user. If omitted, the scripts check whether the password is saved in the svn credentials; otherwise the user is prompted for the password. + +`--rebuild` forces a rebuild if the `RUN` directory and the encoder and decoder binaries already exist; normally they would be reused and not built anew. + +Example: checks out revision 4000 of the trunk to `/home/user/ivas/automated_tests/trunk/r4000` and runs all MC and SBA modes with 48 kHz input and output sample rate with a special user config and the default format dictionary; the output directory is `/home/user/ivas/automated_tests/trunk/r4000/RUN` +``` +./IvasBuildAndRun.py -p user_linux -C MC SBA -srcdir /home/user/ivas/automated_tests/ --svn https://INSERT_SVN_REPO/trunk/ --r 4000 --svnuser user --srin 48 --srout 48 +``` + +--- + +#### `IvasBuildAndRunChecks.py` + +(Exports), builds and runs some dedicated checks. + +Additional arguments: +``` + --checks [{all,CLANG1,CLANG2,CLANG3,VALGRIND,COMPLEXITY} [{all,CLANG1,CLANG2,CLANG3,VALGRIND,COMPLEXITY} ...]] + List of checks to run, default all + --srcdir SRCDIR Existing source file directory or desired directory + for SVN export + --svn SVN Path to SVN repository + --svnuser SVNUSER SVN user name + --svnpass SVNPASS SVN password + -r R SVN revision (default HEAD) + --def [DEF [DEF ...]] + defines to enable for the version to be tested + --undef [UNDEF [UNDEF ...]] + defines to disable for the version to be tested + --create_complexity_tables CREATE_COMPLEXITY_TABLES + create complexity tables with the given prefix + --create_html_output CREATE_HTML_OUTPUT + create html output for automated tests emails with + given prefix + --rebuild force a rebuild of the binaries +``` + +`--checks` selects which checks to run; `all` runs `CLANG1 CLANG2 CLANG3 VALGRIND` + +`--srcdir, --svn, --svnuser, --svnpass, -r` like for `IvasBuildAndRun.py` + +`--def` list of defines to enable (from `lib_com/options.h`) + +`--undef` list of defines to disable (from `lib_com/options.h`) + +`--create_complexity_tables` is optional; if the checks include `COMPLEXITY`, the csv files containing the complexity numbers per mode are created automatically with the file name suffix given as the `CREATE_COMPLEXITY_TABLES` argument. 
+ +`--create_html_output` creates the html output used in the automated test emails. + +`--rebuild` forces a rebuild if the chosen checks directories and the encoder and decoder binaries therein already exist; normally they would be reused and not built anew. + + +Example: checks out revision 4000 of the trunk to `/home/user/ivas/automated_tests/trunk/r4000` and runs checks `CLANG1` and `CLANG2` with the define `IVAS_666_APOCALYPSE_FIX` activated in `lib_com/options.h` for all MC and SBA modes with 48 kHz input and output sample rate with a special user config and the default format dictionary; the output directories per check are below `/home/user/ivas/automated_tests/trunk/r4000/` +``` +./IvasBuildAndRunChecks.py -p user_linux -C MC SBA --checks CLANG1 CLANG2 --def IVAS_666_APOCALYPSE_FIX --srcdir /home/user/ivas/automated_tests/ --svn https://INSERT_SVN_REPO/trunk/ --r 4000 --svnuser user --srin 48 --srout 48 +``` +Example: create complexity tables for the `HEAD` revision of the trunk, only ParamMC modes (needs the `ivas_modes_debug.json` format dictionary) +``` +./IvasBuildAndRunChecks.py -p user_linux -C MC_ParamMC --format_file ivas_modes_debug.json --checks COMPLEXITY --create_complexity_tables PMC_ --srcdir /home/user/ivas/automated_tests/ --svn https://INSERT_SVN_REPO/trunk/ --r 4000 --svnuser user --srin 48 --srout 48 +``` + +--- + +#### `testBitexact.py` + +(Exports), builds and runs two different versions of the code and compares the bitexactness of the decoded bit streams. +Also usable for just comparing decoded wav files in two different directories. + +Additional arguments: +``` + --srcdirref SRCDIRREF + Existing source file directory or desired directory + for SVN export for the reference + --svnref SVNREF Path to SVN repository for the reference + --rref RREF SVN revision for the reference (default HEAD) + --srcdirtest SRCDIRTEST + Existing source file directory or desired directory + for SVN export for the version to be tested + --svntest SVNTEST Path to SVN repository for the version to be tested + --rtest RTEST SVN revision for the version to be tested (default + HEAD) + --deftest [DEFTEST [DEFTEST ...]] + defines to enable for the version to be tested + --svnuser SVNUSER SVN user name + --svnpass SVNPASS SVN password + --testdeconly only run the decoder to generate files to be tested + --refdeconly only run the decoder to generate ref files + --forcetestbuild rebuild binaries to be tested + --filedirtest FILEDIRTEST + directory for decoded waveforms to be tested + --filedirref FILEDIRREF + directory for decoded referenence waveforms +``` +Similar to `IvasBuildAndRun*.py`, but with dedicated switches for reference code and code to be tested, and also for both test +and reference files a directory with existing waveforms can be given. + +So for either reference or test files, one of the following three possibilities is mandatory (only shown for ref files): + +`--svnref --srcdirref [--svnuser --svnpass --rref]` + +`--srcdirref` + +`--filedirref` + +A new build of the encoder and decoder binaries for producing the items to be tested can be forced with `--forcetestbuild`. + +The list of files to be tested is, if waveform directories are compared, generated from the waveforms in the reference directory, +otherwise it is generated using the given format dictionary, selected modes and selected output formats and the input +items for encoding the selected modes. 
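+ +For the pure waveform-comparison use case (no checkout or build step involved), a minimal hypothetical call could look like this; both directory paths are placeholders: +``` +./testBitexact.py -p user_linux --filedirref /path/to/ref/dec --filedirtest /path/to/test/dec +``` 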
+ +Example for checking BE between some local code and the trunk for SBA modes and binaural output formats +``` +./testBitexact.py -p user_linux -C SBA --oc BINAURAL BINAURAL_ROOM_IR -srcdirtest /some/local/src/dir/ -srcdirref /home/user/ivas/automated_tests/ --svnref https://INSERT_SVN_REPO/trunk/ --svnuser user --srin 48 --srout 48 +``` +Example for checking BE between the head of a dev branch and a specific version of the trunk, all MC, SBA, MASA modes and all output formats, limited to 10 seconds of input signal length for the sake of run time. +``` +./testBitexact.py -p user_linux -C SBA PlanarSBA MC MASA --oc BINAURAL BINAURAL_ROOM_IR MONO STEREO FOA HOA2 HOA3 CICP6 CICP12 CICP16 CICP19 -srcdirtest /home/user/ivas/automated_tests/ --svntest https://INSERT_SVN_REPO/B20290120_some_dev_branch/ -srcdirref /home/user/ivas/automated_tests/ --svnref https://INSERT_SVN_REPO/trunk/ --rref 4000 --svnuser user --srin 48 --srout 48 -U 10 +``` + +--- + +#### `self_test.py` +Runs the self tests. + +``` +usage: self_test.py [-h] + [-z [{silent,console,progress,debug,info,warning,error,critical}]] + [-g LOGFILE] [-t MAX_WORKERS] [--create] + [--srcdirref SRCDIRREF] [--svnref SVNREF] [--rref RREF] + [--svnuser SVNUSER] [--svnpass SVNPASS] + [--filedirref FILEDIRREF] [--encref ENCREF] + [--decref DECREF] [--enctest ENCTEST] [--dectest DECTEST] + [-p] + [test_prm] + +positional arguments: + test_prm Test parameter file name + +optional arguments: + -h, --help show this help message and exit + --test_prm TEST_PRM Test parameter file name + --srcdirref SRCDIRREF + Existing source file directory or desired directory + for SVN export for the reference + --svnref SVNREF Path to SVN repository for the reference + --rref RREF SVN revision for the reference (default HEAD) + --svnuser SVNUSER SVN user name + --svnpass SVNPASS SVN password + --filedirref FILEDIRREF + directory for decoded referenence waveforms + --encref ENCREF Reference encoder binary + --decref DECREF Reference decoder binary + --enctest ENCTEST Test encoder binary + --dectest DECTEST Test decoder binary + --logfile LOGFILE file for logging, default .log in the current working + directory + -z [{silent,debug,info,warning,error,critical}], --silent [{silent,debug,info,warning,error,critical}] + Either show with minimal output (default, 'silent'), + or reroute log messages with levels higher than LEVEL + to the console + -t [MAX_WORKERS], --max_workers [MAX_WORKERS] + use multithreading with MAX_WORKERS threads (default: + number of CPUs available at the machine) + --create Create reference conditions with existing binaries + -p, --pesq Run PESQ tool on output mono downmix, default is No +``` +`-p, --pesq` also runs the PESQ tool on non-BE conditions + +`[test_prm]` self test parameter file (default config/self_test.prm) + +`--*ref` arguments for the reference conditions, similar to e.g. `testBitexact.py` + +`--create` similar behaviour to the `-create` option of the old self_test binary: if `IVAS_cod` and `IVAS_dec` exist in the root directory of the working copy, it creates the reference conditions with these binaries. + + +`self_test.py` can be called without any arguments; it will then first check whether +the reference conditions already exist (in the `scripts/ref/` directory of the current working copy). If some are missing, it checks whether the +reference encoder and decoder binaries exist (`IVAS_[cod|dec]_ref` in the root directory of the working copy). 
+If they do not exist, the svn branch and revision of the working copy self_test.py +is called from are determined, a clean copy of the code is checked out and built. +The script then checks if the test binaries are already copied to `self_test/`. +If not, they are built from the working copy. +Missing reference conditions and the test conditions are then generated and +the reference and test conditions are compared. + +----- -- GitLab From 452c1f09166f58da2fb176b27d40f21f15af11c5 Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Fri, 6 Jun 2025 14:52:25 +0200 Subject: [PATCH 4/7] add debugging printouts --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 15dad4ea3..8b7ea8e36 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -502,6 +502,7 @@ workflow: - *check-up-to-date-in-comparison-jobs # need to restore cache again - *overwrite-pytest-cache-with-artifact + - echo "B4 pytest branch - DUT_ENCODER_PATH $DUT_ENCODER_PATH, DUT_DECODER_PATH $DUT_DECODER_PATH" - python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_BRANCH --self-contained-html --junit-xml=$XML_REPORT_BRANCH $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true - zero_errors_branch=$(cat $XML_REPORT_BRANCH | grep -c 'errors="0"') || true - python3 scripts/parse_xml_report.py $XML_REPORT_BRANCH $CSV_BRANCH @@ -520,6 +521,7 @@ workflow: - make -j >> /dev/null # need to restore cache again - *overwrite-pytest-cache-with-artifact + - echo "B4 pytest main - DUT_ENCODER_PATH $DUT_ENCODER_PATH, DUT_DECODER_PATH $DUT_DECODER_PATH" - python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_MAIN --self-contained-html --junit-xml=$XML_REPORT_MAIN $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true - python3 scripts/parse_xml_report.py $XML_REPORT_MAIN $CSV_MAIN -- GitLab From 40e0899dfa7ac929f416359938f88dda721baebc Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Fri, 6 Jun 2025 14:55:40 +0200 Subject: [PATCH 5/7] ad more debug output --- .gitlab-ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8b7ea8e36..aee8e8b3b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -499,6 +499,10 @@ workflow: ### run branch first # this per default builds the branch and the reference and creates the reference outputs - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/build-and-create-float-ref-outputs.sh + - echo "===================================================================" + - ls -l + - env + - echo "===================================================================" - *check-up-to-date-in-comparison-jobs # need to restore cache again - *overwrite-pytest-cache-with-artifact -- GitLab From 48a6993435c1d5013f03ba3b75c925acdc9b218b Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Fri, 6 Jun 2025 15:28:06 +0200 Subject: [PATCH 6/7] activate job to debug again + reduce cases --- .gitlab-ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index aee8e8b3b..b72338b1e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -340,6 +340,7 @@ workflow: PYTEST_LOG_TARGET_BRANCH: 
"pytest-log-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME.txt" script: - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh + - export PYTEST_ADDOPTS="--last-failed -k JBM" - set -euxo pipefail @@ -466,7 +467,7 @@ workflow: - if [ -s $FAILED_TESTCASES_LIST ]; then - *overwrite-pytest-cache-with-artifact - - export PYTEST_ADDOPTS=--last-failed + - export PYTEST_ADDOPTS="--last-failed -k JBM" - else # turn off echoing back of commands for result printout - *print-results-banner @@ -1011,7 +1012,7 @@ check-be-to-target-short-dec-0db: DUT_DECODER_PATH: "$REF_DECODER_PATH" MERGE_TARGET_DECODER_PATH: "$REF_DECODER_PATH" -.check-regressions-short-dec-0db: +check-regressions-short-dec-0db: extends: - .check-regressions-pytest-job needs: -- GitLab From 7d1f559edf23487571ffbbd67ffc0f07f5c9b592 Mon Sep 17 00:00:00 2001 From: Jan Kiene Date: Fri, 6 Jun 2025 16:36:09 +0200 Subject: [PATCH 7/7] more debug printout --- .gitlab-ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b72338b1e..12b5c75dd 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -508,6 +508,7 @@ workflow: # need to restore cache again - *overwrite-pytest-cache-with-artifact - echo "B4 pytest branch - DUT_ENCODER_PATH $DUT_ENCODER_PATH, DUT_DECODER_PATH $DUT_DECODER_PATH" + - ls -l - python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_BRANCH --self-contained-html --junit-xml=$XML_REPORT_BRANCH $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true - zero_errors_branch=$(cat $XML_REPORT_BRANCH | grep -c 'errors="0"') || true - python3 scripts/parse_xml_report.py $XML_REPORT_BRANCH $CSV_BRANCH @@ -523,10 +524,11 @@ workflow: - git checkout $CI_MERGE_REQUEST_TARGET_BRANCH_NAME - git pull origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME - make clean - - make -j >> /dev/null + - make -j # need to restore cache again - *overwrite-pytest-cache-with-artifact - echo "B4 pytest main - DUT_ENCODER_PATH $DUT_ENCODER_PATH, DUT_DECODER_PATH $DUT_DECODER_PATH" + - ls -l - python3 -m pytest --tb=no -q $TEST_SUITE -v --keep_files --create_cut --html=$HTML_REPORT_MAIN --self-contained-html --junit-xml=$XML_REPORT_MAIN $comp_args --ref_encoder_path $REF_ENCODER_PATH --ref_decoder_path $REF_DECODER_PATH --dut_encoder_path $DUT_ENCODER_PATH --dut_decoder_path $DUT_DECODER_PATH -n auto --testcase_timeout $testcase_timeout || true - python3 scripts/parse_xml_report.py $XML_REPORT_MAIN $CSV_MAIN -- GitLab