From 40adfdf9fd2358c6daa5f80c1b1dc65b22657715 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Tue, 26 Aug 2025 07:56:32 +0200
Subject: [PATCH 01/10] Add scripts/merge_logs.py

---
 scripts/merge_logs.py | 60 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 60 insertions(+)
 create mode 100644 scripts/merge_logs.py

diff --git a/scripts/merge_logs.py b/scripts/merge_logs.py
new file mode 100644
index 0000000000..7e332e4f52
--- /dev/null
+++ b/scripts/merge_logs.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+
+import argparse
+from pathlib import Path
+import pandas as pd
+
+def main(logs_dir, output_filename, measure):
+
+    input_path = Path(logs_dir)
+    logs = [f for f in input_path.iterdir() if f.is_dir()]
+
+    # Build dict of scores
+    logdict = {}
+    for log in logs:
+        date = log.name
+        logdict[date] = {}
+        for logfile in log.glob('*.csv'):
+            tmp = logfile.name.split('-')
+            job = '-'.join(tmp[3:-4])
+            #sha = tmp[-1].split('.')[0]
+            #logdict[date]["sha"] = sha # Maybe we want to store the SHA too somehow.
+            data = pd.read_csv(logfile, usecols=["testcase", measure])
+            logdict[date][job] = {}
+
+            for testcase, value in zip(data["testcase"], data[measure]):
+                logdict[date][job][testcase] = value
+
+    # Restructure dict
+    csv_rows = []
+    for date, jobs in logdict.items():
+        for job, testcases in jobs.items():
+            for testcase, value in testcases.items():
+                csv_rows.append((job, testcase, date, value))
+
+    result = pd.DataFrame(csv_rows, columns=["job","testcase","date","value"])
+    result = result.pivot(index=['job', 'testcase'], columns='date', values="value").reset_index()
+    result.to_csv(output_filename, sep=';', index=False)
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="logs dir")
+    parser.add_argument(
+        "logs_dir",
+        type=str,
+        help="Logs dir, e.g. logs",
+    )
+    parser.add_argument(
+        "output_filename",
+        type=str,
+        help="Filename of the combined csv file. e.g mld.csv",
+    )
+    parser.add_argument(
+        "--measure",
+        type=str,
+        help="Measure for summary, one of MLD MIN_SSNR MAX_ABS_DIFF MIN_ODG, (default: MLD)",
+        default="MLD",
+    )
+
+    args = parser.parse_args()
+
+    main(args.logs_dir, args.output_filename, args.measure)
\ No newline at end of file
-- 
GitLab

From 329d9f555d996fb9d0c4aab94df13fa0f03f7d59 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Mon, 1 Sep 2025 16:25:13 +0200
Subject: [PATCH 02/10] Add output of scripts to reproduce critical cases

---
 scripts/find_regressions_from_logs.py | 237 ++++++++++++++++++++++++++
 scripts/merge_logs.py                 |  60 -------
 2 files changed, 237 insertions(+), 60 deletions(-)
 create mode 100644 scripts/find_regressions_from_logs.py
 delete mode 100644 scripts/merge_logs.py

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
new file mode 100644
index 0000000000..522d144436
--- /dev/null
+++ b/scripts/find_regressions_from_logs.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python3
+
+import argparse
+from pathlib import Path
+import pandas as pd
+
+REPRODUCE_REGRESSION_SCRIPT_TMPL = """
+#!/bin/bash -x
+
+SCRIPTS_DIR=/usr/local/scripts
+LTV_DIR=/usr/local/ltv
+
+MIN_DATE={min_date}
+MIN_SHA={min_sha}
+LEVEL_SCALING={level_scaling}
+TESTCASE="{testcase}"
+
+REF_ENC1={REF_ENC1}
+REF_DEC1={REF_DEC1}
+DUT_ENC1={DUT_ENC1}
+DUT_DEC1={DUT_DEC1}
+
+REF_ENC2={REF_ENC2}
+REF_DEC2={REF_DEC2}
+DUT_ENC2={DUT_ENC2}
+DUT_DEC2={DUT_DEC2}
+
+# Obtain executables from past reference
+git checkout 'ivas-float-update@{$MIN_DATE 22:00:00}'
+make clean
+make -j
+mv IVAS_cod IVAS_cod_ref_1
+mv IVAS_dec IVAS_dec_ref_1
+mv IVAS_rend IVAS_rend_ref_1
+
+git checkout $MIN_SHA
+make clean
+make -j
+mv IVAS_cod IVAS_cod_1
+mv IVAS_dec IVAS_dec_1
+mv IVAS_rend IVAS_rend_1
+
+# Obtain latest executables
+git checkout ivas-float-update
+git pull
+make clean
+make -j
+mv IVAS_cod IVAS_cod_ref_2
+mv IVAS_dec IVAS_dec_ref_2
+mv IVAS_rend IVAS_rend_ref_2
+
+git checkout main
+git pull
+make clean
+make -j
+mv IVAS_cod IVAS_cod_2
+mv IVAS_dec IVAS_dec_2
+mv IVAS_rend IVAS_rend_2
+
+# Get fresh copy of scripts, tests and ci
+cp -r $SCRIPTS_DIR/{scripts,tests,ci,pytest.ini} .
+python3 ci/remove_unsupported_testcases.py scripts/config/self_test.prm scripts/config/self_test_ltv.prm # Should not be needed since only supported testcases should be input
+
+# Get LTVs
+cp $LTV_DIR/* scripts/testv
+
+# Apply level scaling
+tests/scale_pcm.py ./scripts/testv/ "$LEVEL_SCALING"
+
+# Run tests
+cp IVAS_rend_ref_1 IVAS_rend_ref
+cp IVAS_rend_1 IVAS_rend
+python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --ref_encoder_path $REF_ENC1 --ref_decoder_path $REF_DEC1
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg
+
+# Store results from first run
+mkdir tests1
+cp -r tests/ref tests/dut tests1
+cp -r tests/renderer/ref tests/renderer/cut tests1
+
+cp IVAS_rend_ref_2 IVAS_rend_ref
+cp IVAS_rend_2 IVAS_rend
+python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --ref_encoder_path $REF_ENC2 --ref_decoder_path $REF_DEC2
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg
+
+"""
+
+
+def main(logs_dir, output_filename, measure):
+
+    input_path = Path(logs_dir)
+    logs = [f for f in input_path.iterdir() if f.is_dir()]
+
+    # Build dict of scores
+    formatdict = {}
+    sha = {}
+    logdict = {}
+    for log in logs:
+        date = log.name
+        logdict[date] = {}
+        formatdict[date] = {}
+        for logfile in log.glob("*.csv"):
+            tmp = logfile.name.split("-")
+            job = "-".join(tmp[3:-4])
+            sha[date] = tmp[-1].split(".")[0]
+            data = pd.read_csv(logfile, usecols=["testcase", measure, "format"])
+            logdict[date][job] = {}
+            formatdict[date][job] = {}
+
+            for testcase, value, format in zip(
+                data["testcase"], data[measure], data["format"]
+            ):
+                formatdict[date][job][testcase] = format
+                logdict[date][job][testcase] = value
+
+    # Restructure dict
+    csv_rows = []
+    formats = []
+    for date, jobs in logdict.items():
+        for job, testcases in jobs.items():
+            for testcase, value in testcases.items():
+                csv_rows.append((job, testcase, date, value))
+                formats.append((job, testcase, date, formatdict[date][job][testcase]))
+
+    result = pd.DataFrame(csv_rows, columns=["job", "testcase", "date", "value"])
+    result = result.pivot(
+        index=["job", "testcase"], columns="date", values="value"
+    ).reset_index()
+
+    f = pd.DataFrame(formats, columns=["job", "testcase", "date", "format"])
+    f = f.pivot(
+        index=["job", "testcase"], columns="date", values="format"
+    ).reset_index()
+
+    values = result.iloc[:, 2:]
+    last_date = values.columns[-1]
+
+    result.insert(2, "format", f[last_date])
+    result.insert(3, "min_date", values.idxmin(axis=1))
+    result.insert(4, "min_sha", result["min_date"].map(sha))
+    result.insert(5, "curr_value", values[last_date])
+    result.insert(6, "min_value", values.min(axis=1))
+    result.insert(7, "diff", result["curr_value"] - result["min_value"])
+    result.insert(8, "ratio", result["curr_value"] / result["min_value"])
+    result.loc[result["min_value"] == 0, "ratio"] = (
+        1  # Set ratio to 1 for denominator 0
+    )
+
+    result["min_sha"] = "'" + result["min_sha"]
+
+    result.to_csv(output_filename, sep=";", index=False)
+
+    critical = result.iloc[:, 0:9]
+    formats = list(set(critical["format"]))
+    critical3 = pd.DataFrame()
+
+    for format in formats:
+        top3 = (
+            critical[critical["format"] == format]
+            .sort_values(by="ratio", ascending=False)
+            .head(3)
+        )
+        critical3 = pd.concat([critical3, top3], ignore_index=True)
+
+    critical3.to_csv("critical3.csv", sep=";", index=False)
+
+    row_counter = 1
+    for row in critical3.row():
+
+        # Find level
+        level_scaling = 1.0
+        if "lev+10" in row["job"]:
+            level_scaling = 3.162
+        if "lev-10" in row["job"]:
+            level_scaling = 0.3162
+
+        # Find executables setup
+        REF_ENC1 = "IVAS_cod_ref_1"
+        REF_DEC1 = "IVAS_dec_ref_1"
+        DUT_ENC1 = "IVAS_ref_1"
+        DUT_DEC1 = "IVAS_dec_1"
+        REF_ENC2 = "IVAS_cod_ref_2"
+        REF_DEC2 = "IVAS_dec_ref_2"
+        DUT_ENC2 = "IVAS_ref_2"
+        DUT_DEC2 = "IVAS_dec_2"
+
+        if "dec" in row["job"]:
+            DUT_ENC1 = "IVAS_cod_ref_1"
+            DUT_ENC2 = "IVAS_cod_ref_2"
+        if "enc" in row["job"]:
+            DUT_DEC1 = "IVAS_dec_ref_1"
+            DUT_DEC2 = "IVAS_dec_ref_2"
+
+        script_content = REPRODUCE_REGRESSION_SCRIPT_TMPL.format(
+            min_date=row["min_date"],
+            min_sha=row["min_sha"][1:],
+            LEVEL_SCALING={level_scaling},
+            TESTCASE=row["testcase"][1:],
+            REF_ENC1 = REF_ENC1,
+            REF_DEC1 = REF_DEC1,
+            DUT_ENC1 = DUT_ENC1,
+            DUT_DEC1 = DUT_DEC1,
+            REF_ENC2 = REF_ENC2,
+            REF_DEC2 = REF_DEC2,
+            DUT_ENC2 = DUT_ENC2,
+            DUT_DEC2 = DUT_DEC2,
+        )
+
+        script_filename = f"regression_{row_counter:02d}.bash"
+        with open(script_filename, "w") as f:
+            f.write(script_content)
+
+        row_counter = row_counter + 1
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="logs dir")
+    parser.add_argument(
+        "logs_dir",
+        type=str,
+        help="Logs dir, e.g. logs",
+    )
+    parser.add_argument(
+        "output_filename",
+        type=str,
+        help="Filename of the combined csv file. e.g mld.csv",
+    )
+    parser.add_argument(
+        "--measure",
+        type=str,
+        help="Measure for summary, one of MLD MIN_SSNR MAX_ABS_DIFF MIN_ODG, (default: MLD)",
+        default="MLD",
+    )
+
+    args = parser.parse_args()
+
+    main(args.logs_dir, args.output_filename, args.measure)
diff --git a/scripts/merge_logs.py b/scripts/merge_logs.py
deleted file mode 100644
index 7e332e4f52..0000000000
--- a/scripts/merge_logs.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-from pathlib import Path
-import pandas as pd
-
-def main(logs_dir, output_filename, measure):
-
-    input_path = Path(logs_dir)
-    logs = [f for f in input_path.iterdir() if f.is_dir()]
-
-    # Build dict of scores
-    logdict = {}
-    for log in logs:
-        date = log.name
-        logdict[date] = {}
-        for logfile in log.glob('*.csv'):
-            tmp = logfile.name.split('-')
-            job = '-'.join(tmp[3:-4])
-            #sha = tmp[-1].split('.')[0]
-            #logdict[date]["sha"] = sha # Maybe we want to store the SHA too somehow.
-            data = pd.read_csv(logfile, usecols=["testcase", measure])
-            logdict[date][job] = {}
-
-            for testcase, value in zip(data["testcase"], data[measure]):
-                logdict[date][job][testcase] = value
-
-    # Restructure dict
-    csv_rows = []
-    for date, jobs in logdict.items():
-        for job, testcases in jobs.items():
-            for testcase, value in testcases.items():
-                csv_rows.append((job, testcase, date, value))
-
-    result = pd.DataFrame(csv_rows, columns=["job","testcase","date","value"])
-    result = result.pivot(index=['job', 'testcase'], columns='date', values="value").reset_index()
-    result.to_csv(output_filename, sep=';', index=False)
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="logs dir")
-    parser.add_argument(
-        "logs_dir",
-        type=str,
-        help="Logs dir, e.g. logs",
-    )
-    parser.add_argument(
-        "output_filename",
-        type=str,
-        help="Filename of the combined csv file. e.g mld.csv",
-    )
-    parser.add_argument(
-        "--measure",
-        type=str,
-        help="Measure for summary, one of MLD MIN_SSNR MAX_ABS_DIFF MIN_ODG, (default: MLD)",
-        default="MLD",
-    )
-
-    args = parser.parse_args()
-
-    main(args.logs_dir, args.output_filename, args.measure)
\ No newline at end of file
-- 
GitLab

From bdd825dc29b3561a46c170cb133898fd4de3772f Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Mon, 1 Sep 2025 16:44:16 +0200
Subject: [PATCH 03/10] Fix to regression script generation

---
 scripts/find_regressions_from_logs.py | 33 ++++++++++++---------------
 1 file changed, 15 insertions(+), 18 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 522d144436..55ff526687 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -26,7 +26,7 @@ DUT_ENC2={DUT_ENC2}
 DUT_DEC2={DUT_DEC2}
 
 # Obtain executables from past reference
-git checkout 'ivas-float-update@{$MIN_DATE 22:00:00}'
+git checkout 'ivas-float-update@{{$MIN_DATE 22:00:00}}'
 make clean
 make -j
 mv IVAS_cod IVAS_cod_ref_1
@@ -58,7 +58,7 @@ mv IVAS_dec IVAS_dec_2
 mv IVAS_rend IVAS_rend_2
 
 # Get fresh copy of scripts, tests and ci
-cp -r $SCRIPTS_DIR/{scripts,tests,ci,pytest.ini} .
+cp -r $SCRIPTS_DIR/{{scripts,tests,ci,pytest.ini}} .
 python3 ci/remove_unsupported_testcases.py scripts/config/self_test.prm scripts/config/self_test_ltv.prm # Should not be needed since only supported testcases should be input
 
 # Get LTVs
 cp $LTV_DIR/* scripts/testv
@@ -164,8 +164,7 @@ def main(logs_dir, output_filename, measure):
 
     critical3.to_csv("critical3.csv", sep=";", index=False)
 
-    row_counter = 1
-    for row in critical3.row():
+    for row_counter, row in critical3.iterrows():
 
         # Find level
         level_scaling = 1.0
@@ -173,7 +172,7 @@ def main(logs_dir, output_filename, measure):
         level_scaling = 3.162
         if "lev-10" in row["job"]:
             level_scaling = 0.3162
-
+
         # Find executables setup
         REF_ENC1 = "IVAS_cod_ref_1"
         REF_DEC1 = "IVAS_dec_ref_1"
@@ -190,28 +189,26 @@ def main(logs_dir, output_filename, measure):
         if "enc" in row["job"]:
            DUT_DEC1 = "IVAS_dec_ref_1"
            DUT_DEC2 = "IVAS_dec_ref_2"
-
+
         script_content = REPRODUCE_REGRESSION_SCRIPT_TMPL.format(
             min_date=row["min_date"],
             min_sha=row["min_sha"][1:],
-            LEVEL_SCALING={level_scaling},
-            TESTCASE=row["testcase"][1:],
-            REF_ENC1 = REF_ENC1,
-            REF_DEC1 = REF_DEC1,
-            DUT_ENC1 = DUT_ENC1,
-            DUT_DEC1 = DUT_DEC1,
-            REF_ENC2 = REF_ENC2,
-            REF_DEC2 = REF_DEC2,
-            DUT_ENC2 = DUT_ENC2,
-            DUT_DEC2 = DUT_DEC2,
+            level_scaling=level_scaling,
+            testcase=row["testcase"][1:],
+            REF_ENC1=REF_ENC1,
+            REF_DEC1=REF_DEC1,
+            DUT_ENC1=DUT_ENC1,
+            DUT_DEC1=DUT_DEC1,
+            REF_ENC2=REF_ENC2,
+            REF_DEC2=REF_DEC2,
+            DUT_ENC2=DUT_ENC2,
+            DUT_DEC2=DUT_DEC2,
         )
 
         script_filename = f"regression_{row_counter:02d}.bash"
         with open(script_filename, "w") as f:
             f.write(script_content)
 
-        row_counter = row_counter + 1
-
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="logs dir")
-- 
GitLab

From a93771a9f6f7d2af25b001c93fb190214a1f5a2e Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Mon, 1 Sep 2025 16:53:09 +0200
Subject: [PATCH 04/10] Correct testcase printout and row numbering in find_regressions_from_logs.py

---
 scripts/find_regressions_from_logs.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 55ff526687..4479998bdc 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -194,7 +194,7 @@ def main(logs_dir, output_filename, measure):
             min_date=row["min_date"],
             min_sha=row["min_sha"][1:],
             level_scaling=level_scaling,
-            testcase=row["testcase"][1:],
+            testcase=row["testcase"],
             REF_ENC1=REF_ENC1,
             REF_DEC1=REF_DEC1,
             DUT_ENC1=DUT_ENC1,
@@ -205,7 +205,7 @@ def main(logs_dir, output_filename, measure):
             DUT_DEC2=DUT_DEC2,
         )
 
-        script_filename = f"regression_{row_counter:02d}.bash"
+        script_filename = f"regression_{row_counter+2:03d}.bash"
         with open(script_filename, "w") as f:
             f.write(script_content)
 
-- 
GitLab

From 88b91a23c57085489e6d3f1c407e75ee561fa82d Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Mon, 1 Sep 2025 18:27:00 +0200
Subject: [PATCH 05/10] Fix for renderer tests, and checkout by date

---
 scripts/find_regressions_from_logs.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 4479998bdc..4fba13730e 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -26,7 +26,7 @@ DUT_ENC2={DUT_ENC2}
 DUT_DEC2={DUT_DEC2}
 
 # Obtain executables from past reference
-git checkout 'ivas-float-update@{{$MIN_DATE 22:00:00}}'
+git checkout `git rev-list -1 --before="$MIN_DATE 22:00:00" ivas-float-update`
 make clean
 make -j
 mv IVAS_cod IVAS_cod_ref_1
@@ -70,8 +70,8 @@ tests/scale_pcm.py ./scripts/testv/ "$LEVEL_SCALING"
 # Run tests
 cp IVAS_rend_ref_1 IVAS_rend_ref
 cp IVAS_rend_1 IVAS_rend
-python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --ref_encoder_path $REF_ENC1 --ref_decoder_path $REF_DEC1
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg
+python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC1 --ref_decoder_path $REF_DEC1
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg
 
 # Store results from first run
 mkdir tests1
@@ -80,8 +80,8 @@ cp -r tests/renderer/ref tests/renderer/cut tests1
 
 cp IVAS_rend_ref_2 IVAS_rend_ref
 cp IVAS_rend_2 IVAS_rend
-python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --ref_encoder_path $REF_ENC2 --ref_decoder_path $REF_DEC2
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg
+python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC2 --ref_decoder_path $REF_DEC2
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg
 
 """
-- 
GitLab

From f4acb5fd143fbdd7faaa8bd252eb8d721a41581f Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Mon, 1 Sep 2025 19:08:52 +0200
Subject: [PATCH 06/10] Fix for encoder executables in scripts/find_regressions_from_logs.py

---
 scripts/find_regressions_from_logs.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 4fba13730e..d40a5cbef0 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -176,11 +176,11 @@ def main(logs_dir, output_filename, measure):
         # Find executables setup
         REF_ENC1 = "IVAS_cod_ref_1"
         REF_DEC1 = "IVAS_dec_ref_1"
-        DUT_ENC1 = "IVAS_ref_1"
+        DUT_ENC1 = "IVAS_cod_1"
         DUT_DEC1 = "IVAS_dec_1"
         REF_ENC2 = "IVAS_cod_ref_2"
         REF_DEC2 = "IVAS_dec_ref_2"
-        DUT_ENC2 = "IVAS_ref_2"
+        DUT_ENC2 = "IVAS_cod_2"
         DUT_DEC2 = "IVAS_dec_2"
 
         if "dec" in row["job"]:
-- 
GitLab

From 9eea37e7f5075724766e70e1e336f3f6ba2871e5 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Tue, 2 Sep 2025 08:30:14 +0200
Subject: [PATCH 07/10] Add cleanup of tests folder

---
 scripts/find_regressions_from_logs.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index d40a5cbef0..9c25412c01 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -59,6 +59,7 @@ mv IVAS_rend IVAS_rend_2
 
 # Get fresh copy of scripts, tests and ci
 cp -r $SCRIPTS_DIR/{{scripts,tests,ci,pytest.ini}} .
+rm -rf tests/ref tests/dut tests/renderer/ref tests/renderer/cut
 python3 ci/remove_unsupported_testcases.py scripts/config/self_test.prm scripts/config/self_test_ltv.prm # Should not be needed since only supported testcases should be input
 
 # Get LTVs
-- 
GitLab

From 3cdfceefe41bc4e4b0cf0c9a9fdfcff93ceeffd2 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Tue, 2 Sep 2025 08:51:44 +0200
Subject: [PATCH 08/10] Fixes in find_regression script

---
 scripts/find_regressions_from_logs.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 9c25412c01..ba473c859e 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -72,17 +72,20 @@ tests/scale_pcm.py ./scripts/testv/ "$LEVEL_SCALING"
 cp IVAS_rend_ref_1 IVAS_rend_ref
 cp IVAS_rend_1 IVAS_rend
 python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC1 --ref_decoder_path $REF_DEC1
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg --junit-xml=report1.xml
+python3 scripts/parse_xml_report.py report1.xml report1.csv
 
 # Store results from first run
-mkdir tests1
+mkdir -p tests1/renderer
 cp -r tests/ref tests/dut tests1
-cp -r tests/renderer/ref tests/renderer/cut tests1
+cp -r tests/renderer/ref tests1/renderer
+cp -r tests/renderer/cut tests1/renderer
 
 cp IVAS_rend_ref_2 IVAS_rend_ref
 cp IVAS_rend_2 IVAS_rend
 python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC2 --ref_decoder_path $REF_DEC2
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg --junit-xml=report2.xml
+python3 scripts/parse_xml_report.py report2.xml report2.csv
 
 """
-- 
GitLab

From 7bfb74f4be99fa778355af6102fa1d300d31e820 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Tue, 2 Sep 2025 11:38:55 +0200
Subject: [PATCH 09/10] Add SHAs in versions.txt and html report

---
 scripts/find_regressions_from_logs.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index ba473c859e..01826f23b1 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -25,8 +25,11 @@ REF_DEC2={REF_DEC2}
 DUT_ENC2={DUT_ENC2}
 DUT_DEC2={DUT_DEC2}
 
+INV_LEVEL_SCALING=$(awk "BEGIN {{print 1.0 / $LEVEL_SCALING}}")
+
 # Obtain executables from past reference
 git checkout `git rev-list -1 --before="$MIN_DATE 22:00:00" ivas-float-update`
+echo "ivas_float_update, min version: `git rev-parse HEAD`" > versions.txt
 make clean
 make -j
 mv IVAS_cod IVAS_cod_ref_1
@@ -34,6 +37,7 @@ mv IVAS_dec IVAS_dec_ref_1
 mv IVAS_rend IVAS_rend_ref_1
 
 git checkout $MIN_SHA
+echo "main, min version: `git rev-parse HEAD`" >> versions.txt
 make clean
 make -j
 mv IVAS_cod IVAS_cod_1
@@ -43,6 +47,7 @@ mv IVAS_rend IVAS_rend_1
 # Obtain latest executables
 git checkout ivas-float-update
 git pull
+echo "ivas-float-update, current version: `git rev-parse HEAD`" >> versions.txt
 make clean
 make -j
 mv IVAS_cod IVAS_cod_ref_2
@@ -51,6 +56,7 @@ mv IVAS_rend IVAS_rend_ref_2
 
 git checkout main
 git pull
+echo "main, current version: `git rev-parse HEAD`" >> versions.txt
 make clean
 make -j
 mv IVAS_cod IVAS_cod_2
@@ -60,7 +66,7 @@ mv IVAS_rend IVAS_rend_2
 # Get fresh copy of scripts, tests and ci
 cp -r $SCRIPTS_DIR/{{scripts,tests,ci,pytest.ini}} .
 rm -rf tests/ref tests/dut tests/renderer/ref tests/renderer/cut
-python3 ci/remove_unsupported_testcases.py scripts/config/self_test.prm scripts/config/self_test_ltv.prm # Should not be needed since only supported testcases should be input
+python3 ci/remove_unsupported_testcases.py scripts/config/self_test.prm scripts/config/self_test_ltv.prm
 
 # Get LTVs
 cp $LTV_DIR/* scripts/testv
@@ -72,7 +78,7 @@ tests/scale_pcm.py ./scripts/testv/ "$LEVEL_SCALING"
 cp IVAS_rend_ref_1 IVAS_rend_ref
 cp IVAS_rend_1 IVAS_rend
 python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC1 --ref_decoder_path $REF_DEC1
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg --junit-xml=report1.xml
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC1 --dut_decoder_path $DUT_DEC1 --mld --ssnr --odg --scalefac $INV_LEVEL_SCALING --junit-xml=report1.xml --html=report1.html --self-contained-html
 python3 scripts/parse_xml_report.py report1.xml report1.csv
 
 # Store results from first run
@@ -84,7 +90,7 @@ cp -r tests/renderer/cut tests1/renderer
 
 cp IVAS_rend_ref_2 IVAS_rend_ref
 cp IVAS_rend_2 IVAS_rend
 python3 -m pytest "$TESTCASE" -n 1 --update_ref 1 --create_ref --param_file scripts/config/self_test_ltv.prm --use_ltv --ref_encoder_path $REF_ENC2 --ref_decoder_path $REF_DEC2
-python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg --junit-xml=report2.xml
+python3 -m pytest "$TESTCASE" -n 1 --create_cut --param_file scripts/config/self_test_ltv.prm --use_ltv --dut_encoder_path $DUT_ENC2 --dut_decoder_path $DUT_DEC2 --mld --ssnr --odg --scalefac $INV_LEVEL_SCALING --junit-xml=report2.xml --html=report2.html --self-contained-html
 python3 scripts/parse_xml_report.py report2.xml report2.csv
 
 """
@@ -155,7 +161,7 @@ def main(logs_dir, output_filename, measure):
     result.to_csv(output_filename, sep=";", index=False)
 
     critical = result.iloc[:, 0:9]
-    formats = list(set(critical["format"]))
+    formats = list(set(critical["format"])).sort()
     critical3 = pd.DataFrame()
 
     for format in formats:
-- 
GitLab

From 4080c5b0fce2c0fc8bf44f5bd246256b1fcf1479 Mon Sep 17 00:00:00 2001
From: Erik Norvell
Date: Tue, 2 Sep 2025 11:48:54 +0200
Subject: [PATCH 10/10] Fix sorting

---
 scripts/find_regressions_from_logs.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/find_regressions_from_logs.py b/scripts/find_regressions_from_logs.py
index 01826f23b1..068fe77871 100644
--- a/scripts/find_regressions_from_logs.py
+++ b/scripts/find_regressions_from_logs.py
@@ -161,7 +161,8 @@ def main(logs_dir, output_filename, measure):
     result.to_csv(output_filename, sep=";", index=False)
 
     critical = result.iloc[:, 0:9]
-    formats = list(set(critical["format"])).sort()
+    formats = list(set(critical["format"]))
+    formats.sort()
     critical3 = pd.DataFrame()
 
     for format in formats:
-- 
GitLab
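
Usage sketch, assuming a logs directory laid out the way the parsing above expects (one subfolder per date, each holding per-job CSV logs with the job name and commit SHA encoded in the file name; the names "logs" and "mld.csv" are simply the examples given in the argparse help):

    # summarize all daily CI logs for the default MLD measure
    python3 scripts/find_regressions_from_logs.py logs mld.csv --measure MLD

This writes the combined pivot table to mld.csv, the top-3 regressions per format to critical3.csv, and one regression_*.bash reproduction script per critical case; each generated bash script is meant to be run from a checkout of the codec repository where make, the test scripts and the long test vectors are available.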