From 31c1c37bcd79455068f4cd6b821678c0d6fd1322 Mon Sep 17 00:00:00 2001 From: Treffehn Date: Fri, 15 Sep 2023 17:01:50 +0200 Subject: [PATCH 1/9] started jbm splitting support --- ivas_processing_scripts/processing/evs.py | 1 + ivas_processing_scripts/processing/ivas.py | 2 +- .../processing/processing.py | 39 ++++++++++++++++++- .../processing_splitting_scaling.py | 9 ++++- 4 files changed, 48 insertions(+), 3 deletions(-) diff --git a/ivas_processing_scripts/processing/evs.py b/ivas_processing_scripts/processing/evs.py index 80b9fe14..fe96b379 100755 --- a/ivas_processing_scripts/processing/evs.py +++ b/ivas_processing_scripts/processing/evs.py @@ -377,6 +377,7 @@ class EVS(Processing): logger: Optional[logging.Logger] = None, ) -> Tuple[Union[Path, str], bool]: if self.tx is not None: + # TODO: (treffehn) change this part -> delay error profile to FER profile and apply if self.tx["type"] == "JBM": bs, ext = os.path.splitext(bitstream) bitstream_processed = Path(f"{bs}_processed{ext}") diff --git a/ivas_processing_scripts/processing/ivas.py b/ivas_processing_scripts/processing/ivas.py index 217f47ad..e4f96a1d 100755 --- a/ivas_processing_scripts/processing/ivas.py +++ b/ivas_processing_scripts/processing/ivas.py @@ -292,7 +292,7 @@ class IVAS(Processing): # add -voip cmdline option to the decoder if voip: - cmd.extend(["-voip"]) + cmd.extend(["-voip", "-Tracefile", f"{str(out_file).split('.')[0]}.tracefile.csv"]) if self.dec_opts: cmd.extend(self.dec_opts) diff --git a/ivas_processing_scripts/processing/processing.py b/ivas_processing_scripts/processing/processing.py index 661faea6..6fd2bd6b 100755 --- a/ivas_processing_scripts/processing/processing.py +++ b/ivas_processing_scripts/processing/processing.py @@ -173,7 +173,7 @@ def concat_setup(cfg: TestConfig, chain, logger: logging.Logger): logger.info(f"Splits written to file {splits_info_file}") -def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, logger: logging.Logger): +def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, logger: logging.Logger): if not splits: raise ValueError("Splitting not possible without split marker") @@ -188,6 +188,42 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, logger: logging.Logger) for split_i in splits: new_splits.append(int(float(split_i) * relative_fs_change)) splits = new_splits + # TODO: add jbmtrim compensation +# rtpTsRelErr = (entry.rtpTs / rtpTimeScale - nextCutTime) / ((entry.rtpTs - lastRtpTs) / rtpTimeScale); +# double +# playTimeAbsErr = rtpTsRelErr * (entry.playTime - lastPlayTime) / playTimeScale; +# if (currBeginPlayTime < 0) { +# // found begin of item +# currBeginPlayTime = entry.playTime / playTimeScale - playTimeAbsErr; +# // now look for end of item +# nextCutTime += itemLength; +# } +# else { +# // found complete item +# double currEndPlayTime = entry.playTime / playTimeScale - playTimeAbsErr; +# mappedStart = currBeginPlayTime; +# mappedLength = currEndPlayTime - currBeginPlayTime; +# return true; +# } +# } +# lastRtpTs = entry.rtpTs; +# lastPlayTime = entry.playTime; +# } +# // check if item +# begin +# was +# found +# if (currBeginPlayTime < 0) +# { +# cerr << "Invalid item start position specified: " << itemStart << endl; +# +# +# return false; +# } +# // return item +# with missing end +# mappedStart = currBeginPlayTime; +# mappedLength = lastPlayTime / playTimeScale - currBeginPlayTime; # check if last split ending coincides with last sample of signal if splits[-1] > len(x): @@ -214,6 +250,7 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, 
meta, logger: logging.Logger) # split ISM metadata if out_fmt.startswith("ISM"): + # TODO: (treffehn) add error message if output is ism or masa and jbm was used split_meta_object = [] for obj_meta in meta: # compute number of frames per split diff --git a/ivas_processing_scripts/processing/processing_splitting_scaling.py b/ivas_processing_scripts/processing/processing_splitting_scaling.py index 4b162f68..055bc1d5 100644 --- a/ivas_processing_scripts/processing/processing_splitting_scaling.py +++ b/ivas_processing_scripts/processing/processing_splitting_scaling.py @@ -210,10 +210,17 @@ class Processing_splitting_scaling(Processing): ) ) splits, split_names, split_fs = read_splits_file(splits_info_file) + # TODO: (treffehn) if jbm and ivas instead of noerror + if not noerror: + # read out tracefile with jbm info + tracefile_info_file = Path(f"{in_file.with_suffix('').with_suffix('')}.tracefile.csv") + tracefile_info = np.genfromtxt(tracefile_info_file, delimiter=";") + else: + tracefile_info = None # split file file_splits, meta_splits = concat_teardown( - x, splits, self.out_fmt, fs, split_fs, in_meta, logger + x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, logger ) # set new out_files -- GitLab From 8546bbd929dcf230934c5748a8bcd1af79f91725 Mon Sep 17 00:00:00 2001 From: Treffehn Date: Mon, 18 Sep 2023 19:19:09 +0200 Subject: [PATCH 2/9] added ivas jbm condition splitting compensation --- ivas_processing_scripts/processing/chains.py | 3 + .../processing/processing.py | 130 ++++++++++-------- .../processing_splitting_scaling.py | 5 +- 3 files changed, 78 insertions(+), 60 deletions(-) diff --git a/ivas_processing_scripts/processing/chains.py b/ivas_processing_scripts/processing/chains.py index f9ec2c79..1b6d9341 100755 --- a/ivas_processing_scripts/processing/chains.py +++ b/ivas_processing_scripts/processing/chains.py @@ -238,6 +238,7 @@ def get_processing_chain( tmp_mnru_q = None tmp_esdru_alpha = None tx_condition = False + ivas_jbm = False # override / add values based on specific conditions cond_cfg = cfg.conditions_to_generate[condition] @@ -395,6 +396,7 @@ def get_processing_chain( "error_profile": tx_cfg_tmp.get("error_profile", None), "n_frames_per_packet": tx_cfg_tmp.get("n_frames_per_packet", None), } + ivas_jbm = True else: raise ValueError( "Type of bitstream procesing either missing or not valid" @@ -507,6 +509,7 @@ def get_processing_chain( "loudness_fmt": post_cfg.get("loudness_fmt", None), "tx_condition": tx_condition, "condition_in_output_filename": cfg.condition_in_output_filename, + "ivas_jbm": ivas_jbm, } ) ) diff --git a/ivas_processing_scripts/processing/processing.py b/ivas_processing_scripts/processing/processing.py index 6fd2bd6b..29b841b4 100755 --- a/ivas_processing_scripts/processing/processing.py +++ b/ivas_processing_scripts/processing/processing.py @@ -40,6 +40,7 @@ from shutil import copyfile from time import sleep from typing import Iterable, Union from warnings import warn +import numpy as np from ivas_processing_scripts.audiotools import audio from ivas_processing_scripts.audiotools.audioarray import window @@ -173,74 +174,92 @@ def concat_setup(cfg: TestConfig, chain, logger: logging.Logger): logger.info(f"Splits written to file {splits_info_file}") -def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, logger: logging.Logger): - if not splits: +def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, logger: logging.Logger): + if splits is None: raise ValueError("Splitting not possible without split 
marker") + if ivas_jbm and tracefile is None: + raise ValueError("Splitting for IVAS JBM conditions not possible without tracefile") - if logger: + if (out_fmt.startswith("ISM") or out_fmt.startswith("MASA")) and ivas_jbm: + raise ValueError("Splitting with JBM compensation not supportet for formats with metadata (e.g. MASA, ISM)") + + if logger and ivas_jbm: + logger.debug("Split files with JBM compensation") + elif logger: logger.debug("Split files") - # if sampling rate changed, adjust splits - fs_new = float(fs) - fs_old = float(in_fs) - relative_fs_change = fs_new / fs_old - new_splits = [] - for split_i in splits: - new_splits.append(int(float(split_i) * relative_fs_change)) - splits = new_splits - # TODO: add jbmtrim compensation -# rtpTsRelErr = (entry.rtpTs / rtpTimeScale - nextCutTime) / ((entry.rtpTs - lastRtpTs) / rtpTimeScale); -# double -# playTimeAbsErr = rtpTsRelErr * (entry.playTime - lastPlayTime) / playTimeScale; -# if (currBeginPlayTime < 0) { -# // found begin of item -# currBeginPlayTime = entry.playTime / playTimeScale - playTimeAbsErr; -# // now look for end of item -# nextCutTime += itemLength; -# } -# else { -# // found complete item -# double currEndPlayTime = entry.playTime / playTimeScale - playTimeAbsErr; -# mappedStart = currBeginPlayTime; -# mappedLength = currEndPlayTime - currBeginPlayTime; -# return true; -# } -# } -# lastRtpTs = entry.rtpTs; -# lastPlayTime = entry.playTime; -# } -# // check if item -# begin -# was -# found -# if (currBeginPlayTime < 0) -# { -# cerr << "Invalid item start position specified: " << itemStart << endl; -# -# -# return false; -# } -# // return item -# with missing end -# mappedStart = currBeginPlayTime; -# mappedLength = lastPlayTime / playTimeScale - currBeginPlayTime; + if not ivas_jbm: + # if sampling rate changed, adjust splits + fs_new = float(fs) + fs_old = float(in_fs) + relative_fs_change = fs_new / fs_old + new_splits = [0] + for split_i in splits: + new_splits.append(int(float(split_i) * relative_fs_change)) + splits = new_splits + else: + # adjust splits for jbm ivas conditions + # following code is based on jbmtrim.cpp script + rtpTimeScale = 1000 # in ms + playTimeScale = 1000 # in ms + new_splits = [None] * (len(splits) + 1) + + split_start = 0 + i = 0 + lastRtpTs = 0 + lastPlayTime = 0 + # find last JBM trace entry with lower or equal RTP time stamp + for j in range(tracefile.shape[0]): + entry = tracefile[j] + # ignore frames with unknown RTP time stamp or playout time + if entry[1] == -1 or entry[3] < 0: + continue + # check if the next position to cut is found + if entry[1] / rtpTimeScale >= split_start: + # interpolate between current and previous RTP time stamp to + # increase accuracy in case of DTX where lot of time stamps are missing + if (num := entry[1] / rtpTimeScale - split_start) == 0: + rtpTsRelErr = num + else: + rtpTsRelErr = num / (((entry[1] - lastRtpTs) / rtpTimeScale) + sys.float_info.epsilon) + playTimeAbsErr = rtpTsRelErr * (entry[3] - lastPlayTime) / playTimeScale + # found one split, save in list and search for next + new_splits[i] = entry[3] / playTimeScale - playTimeAbsErr + split_start = float(splits[i]) / float(fs) + i += 1 + if i >= len(new_splits): + break + lastRtpTs = entry[1] + lastPlayTime = entry[3] + + # check if all splits are found + if i < (len(new_splits) - 1): + raise ValueError("Error in item splitting with JBM compensation") + elif i < (len(new_splits)): + # catch item with missing end + warn("Last split after end of file for IVA JBM condition") + new_splits[i] = 
lastPlayTime / playTimeScale + + # set new values and use new sampling rate + splits = new_splits + for s in range(len(splits)): + splits[s] = int(np.floor(splits[s] * float(in_fs))) # check if last split ending coincides with last sample of signal if splits[-1] > len(x): raise ValueError( f"Last split index {splits[-1]} is larger than the signal length {len(x)}" ) - elif splits[-1] < len(x): + elif (splits[-1] < len(x)) and not ivas_jbm: warn( f"Last split index {splits[-1]} is smaller than the signal length {len(x)}" ) - split_old = 0 split_signals = [] split_meta = [] - for idx, split in enumerate(splits): + for idx in range(len(splits)-1): # split - y = x[split_old:split, :] + y = x[splits[idx]:splits[idx+1], :] # windowing y = window(y) @@ -250,15 +269,14 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, logger: logg # split ISM metadata if out_fmt.startswith("ISM"): - # TODO: (treffehn) add error message if output is ism or masa and jbm was used split_meta_object = [] for obj_meta in meta: # compute number of frames per split - split_old_frames = int(split_old / IVAS_FRAME_LEN_MS / fs * 1000) - split_frames = int(split / IVAS_FRAME_LEN_MS / fs * 1000) + split_frames = int(splits[idx] / IVAS_FRAME_LEN_MS / fs * 1000) + split_next_frames = int(splits[idx+1] / IVAS_FRAME_LEN_MS / fs * 1000) # split - obj_meta = obj_meta[split_old_frames:split_frames, :] + obj_meta = obj_meta[split_frames:split_next_frames, :] # add signal to list split_meta_object.append(obj_meta) @@ -267,8 +285,6 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, logger: logg else: split_meta = repeat(None) - split_old = split - return split_signals, split_meta diff --git a/ivas_processing_scripts/processing/processing_splitting_scaling.py b/ivas_processing_scripts/processing/processing_splitting_scaling.py index 055bc1d5..fdbdde7b 100644 --- a/ivas_processing_scripts/processing/processing_splitting_scaling.py +++ b/ivas_processing_scripts/processing/processing_splitting_scaling.py @@ -210,8 +210,7 @@ class Processing_splitting_scaling(Processing): ) ) splits, split_names, split_fs = read_splits_file(splits_info_file) - # TODO: (treffehn) if jbm and ivas instead of noerror - if not noerror: + if self.ivas_jbm and not noerror: # read out tracefile with jbm info tracefile_info_file = Path(f"{in_file.with_suffix('').with_suffix('')}.tracefile.csv") tracefile_info = np.genfromtxt(tracefile_info_file, delimiter=";") @@ -220,7 +219,7 @@ class Processing_splitting_scaling(Processing): # split file file_splits, meta_splits = concat_teardown( - x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, logger + x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, self.ivas_jbm, logger ) # set new out_files -- GitLab From 54a89b56eecaa7b1de2b71785fdc31550020ae07 Mon Sep 17 00:00:00 2001 From: Treffehn Date: Tue, 19 Sep 2023 17:31:18 +0200 Subject: [PATCH 3/9] added wrapper for evs jbm processing --- examples/TEMPLATE.yml | 8 +- .../audiotools/wrappers/dlyerr_2_errpat.py | 265 ++++++++++++++++++ ivas_processing_scripts/bin/README.md | 1 + ivas_processing_scripts/binary_paths.yml | 2 + ivas_processing_scripts/processing/chains.py | 3 + ivas_processing_scripts/processing/evs.py | 21 +- 6 files changed, 290 insertions(+), 10 deletions(-) create mode 100644 ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py diff --git a/examples/TEMPLATE.yml b/examples/TEMPLATE.yml index bab59220..8a755076 100755 --- a/examples/TEMPLATE.yml +++ b/examples/TEMPLATE.yml @@ -139,9 
+139,15 @@ input: # type: "JBM" ### JBM - ### REQUIRED: either error_pattern or error_profile + ### REQUIRED: either error_pattern (and errpatt_late_loss_rate or errpatt_delay and errpatt_seed for EVS) or error_profile ### delay error profile file # error_pattern: ".../dly_error_profile.dat" + ### Late loss rate in precent or EVS + # errpatt_late_loss_rate: 1 + ### Constant JBM delay in milliseconds for EVS + # errpatt_delay: 200 + ### Seed for error pattern shift in EVS JBM + # errpatt_seed: 0 ### Index of one of the existing delay error profile files to use (1-11) # error_profile: 5 ## nFramesPerPacket parameter for the network simulator; default = 1 diff --git a/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py new file mode 100644 index 00000000..dd8d9b04 --- /dev/null +++ b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py @@ -0,0 +1,265 @@ +#!/usr/bin/env python3 + +# +# (C) 2022-2023 IVAS codec Public Collaboration with portions copyright Dolby International AB, Ericsson AB, +# Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V., Huawei Technologies Co. LTD., +# Koninklijke Philips N.V., Nippon Telegraph and Telephone Corporation, Nokia Technologies Oy, Orange, +# Panasonic Holdings Corporation, Qualcomm Technologies, Inc., VoiceAge Corporation, and other +# contributors to this repository. All Rights Reserved. +# +# This software is protected by copyright law and by international treaties. +# The IVAS codec Public Collaboration consisting of Dolby International AB, Ericsson AB, +# Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V., Huawei Technologies Co. LTD., +# Koninklijke Philips N.V., Nippon Telegraph and Telephone Corporation, Nokia Technologies Oy, Orange, +# Panasonic Holdings Corporation, Qualcomm Technologies, Inc., VoiceAge Corporation, and other +# contributors to this repository retain full ownership rights in their respective contributions in +# the software. This notice grants no license of any kind, including but not limited to patent +# license, nor is any license granted by implication, estoppel or otherwise. +# +# Contributors are required to enter into the IVAS codec Public Collaboration agreement before making +# contributions. +# +# This software is provided "AS IS", without any express or implied warranties. The software is in the +# development stage. It is intended exclusively for experts who have experience with such software and +# solely for the purpose of inspection. All implied warranties of non-infringement, merchantability +# and fitness for a particular purpose are hereby disclaimed and excluded. +# +# Any dispute, controversy or claim arising under or in relation to providing this software shall be +# submitted to and settled by the final, binding jurisdiction of the courts of Munich, Germany in +# accordance with the laws of the Federal Republic of Germany excluding its conflict of law rules and +# the United Nations Convention on Contracts on the International Sales of Goods. 
+# + +import os.path +from pathlib import Path +from typing import Optional, Union +from warnings import warn + +from ivas_processing_scripts.audiotools.wrappers.networkSimulator import LIST_JBM_PROFILES, ERROR_PATTERNS_DIR +from ivas_processing_scripts.constants import DEFAULT_CONFIG_BINARIES +from ivas_processing_scripts.utils import find_binary, run +from ivas_processing_scripts.audiotools.wrappers.eid_xor import eid_xor + + +def dlyerr_2_errpat( + dlyerr_pattern: Union[str, Path], + fer_pattern: Union[str, Path], + length: Optional[int] = None, + shift: Optional[int] = None, + num_frames_packet: Optional[int] = None, + late_loss_rate: Optional[int] = None, + flag_byte: Optional[bool] = None, + flag_word: Optional[bool] = None, + flag_lf: Optional[bool] = None, + delay: Optional[int] = None, +) -> None: + """ + Wrapper for dlyerr_2_errpat binary to convert delay and error profiles to frame error patterns for EVS JBM + bitstream processing + + Parameters + ---------- + dlyerr_pattern: Union[str, Path] + Path to delay and error pattern file + fer_pattern: Union[str, Path] + Path to frame error pattern file file + length: Optional[int] = None + length in frames + shift: Optional[int] = None + shift/offset in frames + num_frames_packet: Optional[int] = None + Number of frames per packet (1 or 2) + late_loss_rate: Optional[int] = None + Late loss rate in percent + flag_byte: Optional[bool] = None + Flag for using byte-oriented G.192 format (0x21 okay, 0x20 lost) + flag_word: Optional[bool] = None + Flag for using word-oriented G.192 format (0x6b21 okay, 0x6b20 lost) + flag_lf: Optional[bool] = None + Flag for using LF for text format to have one entry per line + delay: Optional[int] = None + Constant JBM delay in milliseconds + """ + + # find binary + if "dlyerr_2_errpat" in DEFAULT_CONFIG_BINARIES["binary_paths"]: + binary = find_binary( + DEFAULT_CONFIG_BINARIES["binary_paths"]["dlyerr_2_errpat"].name, + binary_path=DEFAULT_CONFIG_BINARIES["binary_paths"]["dlyerr_2_errpat"].parent, + ) + else: + binary = find_binary("dlyerr_2_errpat") + + # check for valid inputs + if not Path(dlyerr_pattern).is_file(): + raise ValueError( + f"Delay and error pattern file {dlyerr_pattern} for bitstream processing does not exist" + ) + if delay is not None and late_loss_rate is not None: + raise ValueError("Can't scpecify delay and late loss rate for dlyerr_2_err tool but only one of them") + + # set up command line + cmd = [ + str(binary), + "-i", # input file + str(dlyerr_pattern), + "-o", # output file + str(fer_pattern), + ] + + if length is not None: + cmd.extend(["-L", str(length)]) + if shift is not None: + cmd.extend(["-s", str(shift)]) + if num_frames_packet is not None: + cmd.extend(["-f", str(num_frames_packet)]) + if late_loss_rate is not None: + cmd.extend(["-l", str(late_loss_rate)]) + if flag_byte is not None: + cmd.extend(["-b", str(flag_byte)]) + if flag_word is not None: + cmd.extend(["-w", str(flag_word)]) + if flag_lf is not None: + cmd.extend(["-c", str(flag_lf)]) + if delay is not None: + cmd.extend(["-d", str(delay)]) + + # run command + run(cmd) + + return + + +def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpatt_late_loss_rate, errpatt_delay, errpatt_seed, errpatt_frames_packet): + + # convert delay and error profile + delay = None + num_frames_packet = None + shift = None + late_loss_rate = None + length = None + flag_word = True + + if error_pattern is not None: + # if error pattern and parameter are specified + delay = errpatt_delay + late_loss_rate = 
errpatt_late_loss_rate + num_frames_packet = errpatt_frames_packet + shift = 0 # TODO: (treffehn) compute offset with random and master seed (also for ivas jbm) + dlyerr_pattern = error_pattern + elif error_profile is not None: + # if eror profile number is given + if error_profile == 1 or error_profile == 2 or error_profile == 3: + delay = 200 + num_frames_packet = 1 + elif error_profile == 4 or error_profile == 6: + late_loss_rate = 1 + num_frames_packet = 1 + elif error_profile == 5: + late_loss_rate = 1 + num_frames_packet = 2 + elif error_profile == 7 or error_profile == 8 or error_profile == 9: + delay = 200 + num_frames_packet = 1 + length = 8000 + elif error_profile == 10: + late_loss_rate = 1 + num_frames_packet = 1 + length = 8000 + else: + # TODO: (treffehn) what to do? + raise ValueError("JBM error profile number not an integer between 1 and 10") + shift = 0 # TODO: (treffehn) compute offset with random and master seed (also for ivas jbm) + if error_profile in LIST_JBM_PROFILES: + dlyerr_pattern = ERROR_PATTERNS_DIR.joinpath( + f"dly_error_profile_{error_profile}.dat" + ) + else: + raise ValueError( + f"JBM profile number {error_profile} does not exist, should be between {LIST_JBM_PROFILES[0]} and {LIST_JBM_PROFILES[-1]}" + ) + + fer_pattern = Path(bitstream).with_suffix(".evs_jbm_fer.192") + + dlyerr_2_errpat( + dlyerr_pattern=dlyerr_pattern, + fer_pattern=fer_pattern, + delay=delay, + num_frames_packet=num_frames_packet, + flag_word=flag_word, + shift=shift, + late_loss_rate=late_loss_rate, + length=length, + ) + + # apply FER pattern with eid-xor + # TODO (treffehn) + # eid_xor() + + +def validate_evs_jbm( + error_pattern: Optional[Union[Path, str]] = None, + error_profile: Optional[int] = None, + errpatt_late_loss_rate: Optional[int] = None, + errpatt_delay: Optional[int] = None, + errpatt_seed: Optional[int] = None, + n_frames_per_packet: Optional[int] = None, +) -> None: + """ + Validate settings for the EVS JBM processing + + Parameters + ---------- + error_pattern: Optional[Union[Path, str]] + Path to existing error pattern + error_profile: Optional[int] + Index of existing error pattern + errpatt_late_loss_rate: Optional[int] + Late loss rate in precent or EVS + errpatt_delay: Optional[int] + Constant JBM delay in milliseconds for EVS + errpatt_seed: Optional[int] + Seed for error pattern shift in EVS JBM + n_frames_per_packet: Optional[int] + Number of frames per paket + """ + + if "dlyerr_2_errpat" in DEFAULT_CONFIG_BINARIES["binary_paths"]: + binary = find_binary( + DEFAULT_CONFIG_BINARIES["binary_paths"]["dlyerr_2_errpat"].name, + binary_path=DEFAULT_CONFIG_BINARIES["binary_paths"][ + "dlyerr_2_errpat" + ].parent, + ) + else: + binary = find_binary("dlyerr_2_errpat") + + if binary is None: + raise FileNotFoundError( + "The dlyerr_2_errpat binary for EVS JBM conditions was not found! Please check the configuration." + ) + if error_pattern is not None: + if not Path(error_pattern).exists(): + raise FileNotFoundError( + f"The EVS JBM error profile file {error_pattern} was not found! Please check the configuration." + ) + if error_profile is not None: + raise ValueError( + "JBM pattern and JBM profile number are specified for bitstream processing. Can't use both! Please check the configuration." 
+ ) + if errpatt_late_loss_rate is not None and errpatt_delay is not None: + raise ValueError("For EVS JBM conditions with error pattern only late loss rate OR delay has to be specified, not both!") + if errpatt_late_loss_rate is None and errpatt_delay is None: + raise ValueError("For EVS JBM conditions with error pattern either late loss rate or delay has to be specified!") + if errpatt_seed is None: + warn("No seed was specified for EVS JBM offset -> Use 0") + elif error_profile is not None: + if error_profile not in LIST_JBM_PROFILES: + raise ValueError( + f"JBM profile number {error_profile} does not exist, should be between {LIST_JBM_PROFILES[0]} and {LIST_JBM_PROFILES[-1]}" + ) + if n_frames_per_packet is not None and n_frames_per_packet not in [1, 2]: + raise ValueError( + f"n_frames_per_paket is {n_frames_per_packet}. Should be 1 or 2. Please check your configuration." + ) + return diff --git a/ivas_processing_scripts/bin/README.md b/ivas_processing_scripts/bin/README.md index 4da26590..494ed5ed 100755 --- a/ivas_processing_scripts/bin/README.md +++ b/ivas_processing_scripts/bin/README.md @@ -14,3 +14,4 @@ Necessary additional executables: | JBM network simulator | networkSimulator_g192 | https://www.3gpp.org/ftp/tsg_sa/WG4_CODEC/TSGS4_76/docs/S4-131277.zip | | MASA rendering (also used in loudness measurement of MASA items) | masaRenderer | https://www.3gpp.org/ftp/TSG_SA/WG4_CODEC/TSGS4_122_Athens/Docs/S4-230221.zip | | EVS reference conditions | EVS_cod, EVS_dec | https://www.3gpp.org/ftp/Specs/archive/26_series/26.443/26443-h00.zip | +| EVS JBM conditions | dlyerr_2_errpat | http://ftp.3gpp.org/tsg_sa/WG4_CODEC/TSGS4_70/Docs/S4-121077.zip | \ No newline at end of file diff --git a/ivas_processing_scripts/binary_paths.yml b/ivas_processing_scripts/binary_paths.yml index 62abb593..1c810062 100644 --- a/ivas_processing_scripts/binary_paths.yml +++ b/ivas_processing_scripts/binary_paths.yml @@ -30,3 +30,5 @@ # masaRenderer: "path/to/binary/masaRenderer" # ### Binary for reverberation # reverb: "path/to/binary/reverb" +# ### Binary for EVS JBM error pattern conversion tool +# dlyerr_2_errpat: "path/to/binary/dlyerr_2_errpat" diff --git a/ivas_processing_scripts/processing/chains.py b/ivas_processing_scripts/processing/chains.py index 1b6d9341..4cb6c9c8 100755 --- a/ivas_processing_scripts/processing/chains.py +++ b/ivas_processing_scripts/processing/chains.py @@ -302,6 +302,9 @@ def get_processing_chain( "error_pattern": get_abs_path( tx_cfg_tmp.get("error_pattern", None) ), + "errpatt_late_loss_rate": tx_cfg_tmp.get("errpatt_late_loss_rate", None), + "errpatt_delay": tx_cfg_tmp.get("errpatt_delay", None), + "errpatt_seed": tx_cfg_tmp.get("errpatt_seed", None), "error_profile": tx_cfg_tmp.get("error_profile", None), "n_frames_per_packet": tx_cfg_tmp.get("n_frames_per_packet", None), } diff --git a/ivas_processing_scripts/processing/evs.py b/ivas_processing_scripts/processing/evs.py index fe96b379..ebc2d870 100755 --- a/ivas_processing_scripts/processing/evs.py +++ b/ivas_processing_scripts/processing/evs.py @@ -50,10 +50,8 @@ from ivas_processing_scripts.audiotools.wrappers.eid_xor import ( create_and_apply_error_pattern, validate_error_pattern_application, ) -from ivas_processing_scripts.audiotools.wrappers.networkSimulator import ( - apply_network_simulator, - validate_network_simulator, -) +from ivas_processing_scripts.audiotools.wrappers.dlyerr_2_errpat import validate_evs_jbm +from ivas_processing_scripts.audiotools.wrappers.dlyerr_2_errpat import evs_jbm from 
ivas_processing_scripts.processing.processing import Processing from ivas_processing_scripts.utils import apply_func_parallel, run @@ -120,9 +118,12 @@ class EVS(Processing): # existence of error pattern files (if given) already here if self.tx is not None: if self.tx.get("type", None) == "JBM": - validate_network_simulator( + validate_evs_jbm( self.tx["error_pattern"], self.tx["error_profile"], + self.tx["errpatt_late_loss_rate"], + self.tx["errpatt_delay"], + self.tx["errpatt_seed"], self.tx["n_frames_per_packet"], ) elif self.tx.get("type", None) == "FER": @@ -377,16 +378,18 @@ class EVS(Processing): logger: Optional[logging.Logger] = None, ) -> Tuple[Union[Path, str], bool]: if self.tx is not None: - # TODO: (treffehn) change this part -> delay error profile to FER profile and apply if self.tx["type"] == "JBM": bs, ext = os.path.splitext(bitstream) bitstream_processed = Path(f"{bs}_processed{ext}") - logger.debug(f"Network simulator {bitstream} -> {bitstream_processed}") - apply_network_simulator( + logger.debug(f"EVS JBM processing {bitstream} -> {bitstream_processed}") + evs_jbm( bitstream, bitstream_processed, - self.tx["error_pattern"], self.tx["error_profile"], + self.tx["error_pattern"], + self.tx["errpatt_late_loss_rate"], + self.tx["errpatt_delay"], + self.tx["errpatt_seed"], self.tx["n_frames_per_packet"], ) voip = True -- GitLab From 8223fb7c091e61959537caa4b30c4984fbaf98a5 Mon Sep 17 00:00:00 2001 From: Treffehn Date: Thu, 21 Sep 2023 11:48:54 +0200 Subject: [PATCH 4/9] fixed evs jbm processing --- examples/TEMPLATE.yml | 2 +- .../audiotools/wrappers/dlyerr_2_errpat.py | 4 +--- .../audiotools/wrappers/networkSimulator.py | 2 +- ivas_processing_scripts/processing/evs.py | 21 +++++-------------- 4 files changed, 8 insertions(+), 21 deletions(-) diff --git a/examples/TEMPLATE.yml b/examples/TEMPLATE.yml index 8a755076..d10c115c 100755 --- a/examples/TEMPLATE.yml +++ b/examples/TEMPLATE.yml @@ -148,7 +148,7 @@ input: # errpatt_delay: 200 ### Seed for error pattern shift in EVS JBM # errpatt_seed: 0 - ### Index of one of the existing delay error profile files to use (1-11) + ### Index of one of the existing delay error profile files to use (1-10) # error_profile: 5 ## nFramesPerPacket parameter for the network simulator; default = 1 # n_frames_per_packet: 2 diff --git a/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py index dd8d9b04..3db5383b 100644 --- a/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py +++ b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py @@ -167,7 +167,6 @@ def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpat num_frames_packet = 1 length = 8000 else: - # TODO: (treffehn) what to do? 
raise ValueError("JBM error profile number not an integer between 1 and 10") shift = 0 # TODO: (treffehn) compute offset with random and master seed (also for ivas jbm) if error_profile in LIST_JBM_PROFILES: @@ -193,8 +192,7 @@ def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpat ) # apply FER pattern with eid-xor - # TODO (treffehn) - # eid_xor() + eid_xor(fer_pattern, bitstream, bitstream_processed) def validate_evs_jbm( diff --git a/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py b/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py index 3c116979..20cd227f 100644 --- a/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py +++ b/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py @@ -37,7 +37,7 @@ from typing import Optional, Union from ivas_processing_scripts.constants import DEFAULT_CONFIG_BINARIES from ivas_processing_scripts.utils import find_binary, run -LIST_JBM_PROFILES = range(12) +LIST_JBM_PROFILES = range(11) ERROR_PATTERNS_DIR = Path(__file__).parent.parent.parent.joinpath("dly_error_profiles") diff --git a/ivas_processing_scripts/processing/evs.py b/ivas_processing_scripts/processing/evs.py index ebc2d870..0e1565b7 100755 --- a/ivas_processing_scripts/processing/evs.py +++ b/ivas_processing_scripts/processing/evs.py @@ -228,14 +228,12 @@ class EVS(Processing): None, show_progress=False, ) - voip = [scb[1] for scb in split_chan_bs] - split_chan_bs = [scb[0] for scb in split_chan_bs] # run all decoders twice with and without bitstream errors logger.debug(f"Running EVS decoders for {out_file.stem.split('.')[0]}") apply_func_parallel( self.dec, - zip(split_chan_bs, split_chan_out, voip, repeat(logger)), + zip(split_chan_bs, split_chan_out, repeat(logger)), None, "mt" if self.multiprocessing else None, show_progress=False, @@ -246,7 +244,6 @@ class EVS(Processing): zip( split_chan_bs_unprocessed, split_chan_out_noerror, - repeat(False), repeat(logger), ), None, @@ -376,7 +373,7 @@ class EVS(Processing): in_file: Union[Path, str], bitstream: Path, logger: Optional[logging.Logger] = None, - ) -> Tuple[Union[Path, str], bool]: + ) -> Union[Path, str]: if self.tx is not None: if self.tx["type"] == "JBM": bs, ext = os.path.splitext(bitstream) @@ -392,8 +389,7 @@ class EVS(Processing): self.tx["errpatt_seed"], self.tx["n_frames_per_packet"], ) - voip = True - return bitstream_processed, voip + return bitstream_processed elif self.tx["type"] == "FER": bs, ext = os.path.splitext(bitstream) @@ -417,27 +413,20 @@ class EVS(Processing): master_seed=self.tx["master_seed"], prerun_seed=self.tx["prerun_seed"], ) - voip = False - return bitstream_processed, voip + return bitstream_processed else: - voip = False - return bitstream, voip + return bitstream def dec( self, bitstream: Path, out_pcm_file: Path, - voip: bool = False, logger: Optional[logging.Logger] = None, ) -> None: cmd = [self.dec_bin] if self._use_wine: cmd.insert(0, "wine") - # add -voip cmdline option to the decoder - if voip: - cmd.extend(["-voip"]) - if self.dec_opts: cmd.extend(self.dec_opts) -- GitLab From 5f8001c5382374bf8b43537d0a50ed494e4c5a84 Mon Sep 17 00:00:00 2001 From: Treffehn Date: Thu, 21 Sep 2023 13:48:54 +0200 Subject: [PATCH 5/9] added offset for jbm error pattern --- examples/TEMPLATE.yml | 6 ++--- .../audiotools/wrappers/dlyerr_2_errpat.py | 18 ++++++++++--- .../audiotools/wrappers/networkSimulator.py | 27 ++++++++++++++++--- ivas_processing_scripts/processing/chains.py | 3 +++ ivas_processing_scripts/processing/evs.py | 1 + 
ivas_processing_scripts/processing/ivas.py | 2 ++ .../processing/processing.py | 2 +- 7 files changed, 49 insertions(+), 10 deletions(-) diff --git a/examples/TEMPLATE.yml b/examples/TEMPLATE.yml index d10c115c..38d01863 100755 --- a/examples/TEMPLATE.yml +++ b/examples/TEMPLATE.yml @@ -139,19 +139,19 @@ input: # type: "JBM" ### JBM - ### REQUIRED: either error_pattern (and errpatt_late_loss_rate or errpatt_delay and errpatt_seed for EVS) or error_profile + ### REQUIRED: either error_pattern (and errpatt_late_loss_rate or errpatt_delay) or error_profile ### delay error profile file # error_pattern: ".../dly_error_profile.dat" ### Late loss rate in precent or EVS # errpatt_late_loss_rate: 1 ### Constant JBM delay in milliseconds for EVS # errpatt_delay: 200 - ### Seed for error pattern shift in EVS JBM - # errpatt_seed: 0 ### Index of one of the existing delay error profile files to use (1-10) # error_profile: 5 ## nFramesPerPacket parameter for the network simulator; default = 1 # n_frames_per_packet: 2 + ### Seed for error pattern shift in JBM; default = 0 or determined by profile number + # errpatt_seed: 0 ### FER ### REQUIRED: either error_pattern or error_rate diff --git a/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py index 3db5383b..9ea3e2b0 100644 --- a/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py +++ b/ivas_processing_scripts/audiotools/wrappers/dlyerr_2_errpat.py @@ -39,6 +39,8 @@ from ivas_processing_scripts.audiotools.wrappers.networkSimulator import LIST_JB from ivas_processing_scripts.constants import DEFAULT_CONFIG_BINARIES from ivas_processing_scripts.utils import find_binary, run from ivas_processing_scripts.audiotools.wrappers.eid_xor import eid_xor +from ivas_processing_scripts.audiotools.wrappers.random_seed import random_seed +from ivas_processing_scripts.audiotools.wrappers.networkSimulator import length_pattern def dlyerr_2_errpat( @@ -130,7 +132,7 @@ def dlyerr_2_errpat( return -def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpatt_late_loss_rate, errpatt_delay, errpatt_seed, errpatt_frames_packet): +def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpatt_late_loss_rate, errpatt_delay, errpatt_seed, errpatt_frames_packet, master_seed): # convert delay and error profile delay = None @@ -139,14 +141,19 @@ def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpat late_loss_rate = None length = None flag_word = True + if errpatt_seed is None: + errpatt_seed = 0 if error_pattern is not None: # if error pattern and parameter are specified delay = errpatt_delay late_loss_rate = errpatt_late_loss_rate num_frames_packet = errpatt_frames_packet - shift = 0 # TODO: (treffehn) compute offset with random and master seed (also for ivas jbm) dlyerr_pattern = error_pattern + # compute offset of error pattern + len_pattern = length_pattern(dlyerr_pattern) + shift = random_seed((0, len_pattern - 1), master_seed, errpatt_seed) + elif error_profile is not None: # if eror profile number is given if error_profile == 1 or error_profile == 2 or error_profile == 3: @@ -168,7 +175,7 @@ def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpat length = 8000 else: raise ValueError("JBM error profile number not an integer between 1 and 10") - shift = 0 # TODO: (treffehn) compute offset with random and master seed (also for ivas jbm) + if error_profile in LIST_JBM_PROFILES: dlyerr_pattern = 
ERROR_PATTERNS_DIR.joinpath( f"dly_error_profile_{error_profile}.dat" @@ -178,6 +185,10 @@ def evs_jbm(bitstream, bitstream_processed, error_profile, error_pattern, errpat f"JBM profile number {error_profile} does not exist, should be between {LIST_JBM_PROFILES[0]} and {LIST_JBM_PROFILES[-1]}" ) + # compute offset of error pattern + len_pattern = length_pattern(dlyerr_pattern) + shift = random_seed((0, len_pattern - 1), master_seed, error_profile, False) + fer_pattern = Path(bitstream).with_suffix(".evs_jbm_fer.192") dlyerr_2_errpat( @@ -261,3 +272,4 @@ def validate_evs_jbm( f"n_frames_per_paket is {n_frames_per_packet}. Should be 1 or 2. Please check your configuration." ) return + diff --git a/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py b/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py index 20cd227f..aff39599 100644 --- a/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py +++ b/ivas_processing_scripts/audiotools/wrappers/networkSimulator.py @@ -33,9 +33,11 @@ import logging from pathlib import Path from typing import Optional, Union +from warnings import warn from ivas_processing_scripts.constants import DEFAULT_CONFIG_BINARIES from ivas_processing_scripts.utils import find_binary, run +from ivas_processing_scripts.audiotools.wrappers.random_seed import random_seed LIST_JBM_PROFILES = range(11) ERROR_PATTERNS_DIR = Path(__file__).parent.parent.parent.joinpath("dly_error_profiles") @@ -82,6 +84,8 @@ def validate_network_simulator( raise ValueError( "JBM pattern and JBM profile number are specified for bitstream processing. Can't use both! Please check the configuration." ) + if errpatt_seed is None: + raise warn("No error pattern seed specified for JBM offset -> use 0") elif error_profile is not None: if error_profile not in LIST_JBM_PROFILES: raise ValueError( @@ -166,7 +170,8 @@ def apply_network_simulator( error_pattern: Optional[Union[Path, str]] = None, error_profile: Optional[int] = None, n_frames_per_packet: Optional[int] = None, - offset: Optional[int] = 0, + master_seed: Optional[int] = 0, + errpatt_seed: Optional[int] = 0, logger: Optional[logging.Logger] = None, ) -> None: """ @@ -184,8 +189,10 @@ def apply_network_simulator( Index of existing error pattern n_frames_per_packet: Optional[int] Number of frames per paket - offset: Optional[int] - delay offset + master_seed: Optional[int] + Seed to compute delay offset + errpatt_seed: Optional[int] + Seed to compute delay offset logger: Optional[logging.Logger] logger """ @@ -215,9 +222,23 @@ def apply_network_simulator( if error_profile is not None and error_profile == 5: n_frames_per_packet = 2 + # compute offset of error pattern + len_pattern = length_pattern(error_pattern) + if error_profile: + offset = random_seed((0, len_pattern - 1), master_seed, error_profile, False) + else: + offset = random_seed((0, len_pattern - 1), master_seed, errpatt_seed, False) + # apply error pattern network_simulator( error_pattern, in_bitstream, out_bitstream, n_frames_per_packet, offset, logger ) return + + +def length_pattern(path_pattern): + with open(path_pattern, 'r') as f: + p = f.readlines() + length = len(p) + return length diff --git a/ivas_processing_scripts/processing/chains.py b/ivas_processing_scripts/processing/chains.py index 4cb6c9c8..3b2ca615 100755 --- a/ivas_processing_scripts/processing/chains.py +++ b/ivas_processing_scripts/processing/chains.py @@ -307,6 +307,7 @@ def get_processing_chain( "errpatt_seed": tx_cfg_tmp.get("errpatt_seed", None), "error_profile": 
tx_cfg_tmp.get("error_profile", None), "n_frames_per_packet": tx_cfg_tmp.get("n_frames_per_packet", None), + "master_seed": cfg.master_seed, } else: raise ValueError( @@ -398,6 +399,8 @@ def get_processing_chain( "error_pattern": tx_cfg_tmp.get("error_rate", None), "error_profile": tx_cfg_tmp.get("error_profile", None), "n_frames_per_packet": tx_cfg_tmp.get("n_frames_per_packet", None), + "master_seed": cfg.master_seed, + "errpatt_seed": tx_cfg_tmp.get("errpatt_seed", None), } ivas_jbm = True else: diff --git a/ivas_processing_scripts/processing/evs.py b/ivas_processing_scripts/processing/evs.py index 0e1565b7..8fa048d3 100755 --- a/ivas_processing_scripts/processing/evs.py +++ b/ivas_processing_scripts/processing/evs.py @@ -388,6 +388,7 @@ class EVS(Processing): self.tx["errpatt_delay"], self.tx["errpatt_seed"], self.tx["n_frames_per_packet"], + self.tx["master_seed"], ) return bitstream_processed diff --git a/ivas_processing_scripts/processing/ivas.py b/ivas_processing_scripts/processing/ivas.py index e4f96a1d..17ad9c6f 100755 --- a/ivas_processing_scripts/processing/ivas.py +++ b/ivas_processing_scripts/processing/ivas.py @@ -240,6 +240,8 @@ class IVAS(Processing): self.tx["error_pattern"], self.tx["error_profile"], self.tx["n_frames_per_packet"], + self.tx["master_seed"], + self.tx["errpatt_seed"], logger=logger, ) voip = True diff --git a/ivas_processing_scripts/processing/processing.py b/ivas_processing_scripts/processing/processing.py index 29b841b4..c87c50ab 100755 --- a/ivas_processing_scripts/processing/processing.py +++ b/ivas_processing_scripts/processing/processing.py @@ -237,7 +237,7 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, lo raise ValueError("Error in item splitting with JBM compensation") elif i < (len(new_splits)): # catch item with missing end - warn("Last split after end of file for IVA JBM condition") + warn("Last split after end of file for IVAS JBM condition") new_splits[i] = lastPlayTime / playTimeScale # set new values and use new sampling rate -- GitLab From 9d039d269ce0478bb6576514d52f9cfb1231d96b Mon Sep 17 00:00:00 2001 From: Treffehn Date: Thu, 21 Sep 2023 16:30:05 +0200 Subject: [PATCH 6/9] check tracefile propertied --- .../processing_splitting_scaling.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/ivas_processing_scripts/processing/processing_splitting_scaling.py b/ivas_processing_scripts/processing/processing_splitting_scaling.py index fdbdde7b..44decc61 100644 --- a/ivas_processing_scripts/processing/processing_splitting_scaling.py +++ b/ivas_processing_scripts/processing/processing_splitting_scaling.py @@ -214,6 +214,7 @@ class Processing_splitting_scaling(Processing): # read out tracefile with jbm info tracefile_info_file = Path(f"{in_file.with_suffix('').with_suffix('')}.tracefile.csv") tracefile_info = np.genfromtxt(tracefile_info_file, delimiter=";") + validate_tracefile(tracefile_info) else: tracefile_info = None @@ -306,3 +307,44 @@ def measure_loudness(file_splits, out_fmt, fs, loudness, loudness_fmt, meta, log ) scaling_splits.append(scale_factor) return scaling_splits + + +def validate_tracefile(tracefile): + + prevPlayTime = -1 + prevRtpTs = -1 + for j in range(tracefile.shape[0]): + entry = tracefile[j] + + # require playTime > 0 and strictly increasing + if entry[3] < 0: + raise ValueError(f"Error in JBM trace file at line {j}: playTime < 0") + if entry[3] <= prevPlayTime: + raise ValueError(f"Error in JBM trace file at line {j}: playTime not strictly increasing") + 
prevPlayTime = entry[3] + + # require playTime > rcvTime + if entry[2] >= 0 and entry[3] < entry[2]: + raise ValueError(f"Error in JBM trace file at line {j}: playTime < rcvTime") + + # rtpTs must be strictly increasing + if entry[1] >= 0: + if entry[1] == prevRtpTs: + raise ValueError(f"Error in JBM trace file at line {j}: duplicated rtpTs found") + # TODO: (treffehn) include RTP time stamp overflow handling? + # else if (entry.rtpTs + rtpTsExtension < prevRtpTs) { + # if (entry.rtpTs + rtpTsExtension + (1LL << 32) - prevRtpTs < + # prevRtpTs - (entry.rtpTs + rtpTsExtension)) { + # // overflow of 32bit RTP time stamp, increase extension + # rtpTsExtension += 1LL << 32; + # } + # else { + # fprintf(stderr, "Error in JBM trace file at line %d: rtpTs not strictly increasing\n", entry.line); + # return false; + # } + # } + # entry.rtpTs += rtpTsExtension; + # prevRtpTs = entry.rtpTs; + # } + elif entry[1] != -1: + raise ValueError(f"Error in JBM trace file at line {j}: unexpected rtpTs") -- GitLab From 1b2c67c17eb614b3a6f7a5f90fb1ca5edf96fe6b Mon Sep 17 00:00:00 2001 From: Treffehn Date: Fri, 22 Sep 2023 13:39:45 +0200 Subject: [PATCH 7/9] fix small bug in jbm ivas flag --- .../processing/processing_splitting_scaling.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/ivas_processing_scripts/processing/processing_splitting_scaling.py b/ivas_processing_scripts/processing/processing_splitting_scaling.py index 44decc61..ea5afabf 100644 --- a/ivas_processing_scripts/processing/processing_splitting_scaling.py +++ b/ivas_processing_scripts/processing/processing_splitting_scaling.py @@ -219,8 +219,14 @@ class Processing_splitting_scaling(Processing): tracefile_info = None # split file + if self.ivas_jbm and not noerror: + # only use flag if IVAS JBM condition but not the loudness reference without error is processed + ivas_jbm_splitting_flag = True + else: + ivas_jbm_splitting_flag = False + file_splits, meta_splits = concat_teardown( - x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, self.ivas_jbm, logger + x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, ivas_jbm_splitting_flag, logger ) # set new out_files -- GitLab From 65d967c90b1b3f10530995555d03774dcbb7817e Mon Sep 17 00:00:00 2001 From: Treffehn Date: Fri, 22 Sep 2023 13:58:11 +0200 Subject: [PATCH 8/9] write out file with splitting info after adjustment --- ivas_processing_scripts/processing/processing.py | 2 +- .../processing/processing_splitting_scaling.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/ivas_processing_scripts/processing/processing.py b/ivas_processing_scripts/processing/processing.py index c87c50ab..a48c7fb6 100755 --- a/ivas_processing_scripts/processing/processing.py +++ b/ivas_processing_scripts/processing/processing.py @@ -285,7 +285,7 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, lo else: split_meta = repeat(None) - return split_signals, split_meta + return split_signals, split_meta, splits def preprocess(cfg, logger): diff --git a/ivas_processing_scripts/processing/processing_splitting_scaling.py b/ivas_processing_scripts/processing/processing_splitting_scaling.py index ea5afabf..83a4ac16 100644 --- a/ivas_processing_scripts/processing/processing_splitting_scaling.py +++ b/ivas_processing_scripts/processing/processing_splitting_scaling.py @@ -225,10 +225,17 @@ class Processing_splitting_scaling(Processing): else: ivas_jbm_splitting_flag = False - file_splits, meta_splits = concat_teardown( + 
file_splits, meta_splits, new_splits = concat_teardown( x, splits, self.out_fmt, fs, split_fs, in_meta, tracefile_info, ivas_jbm_splitting_flag, logger ) + # write out new splits + new_splits_info_file = in_file.parent.joinpath("undo_concat_splitting.txt") + with open(new_splits_info_file, "w") as f: + print(", ".join([str(s) for s in new_splits]), file=f) + print(", ".join([str(sn) for sn in split_names]), file=f) + print(f"{fs}", file=f) + # set new out_files if noerror: out_files = [ -- GitLab From f1976f0e29e8b7189ae03374fee58d479a14a13f Mon Sep 17 00:00:00 2001 From: Treffehn Date: Tue, 26 Sep 2023 15:57:38 +0200 Subject: [PATCH 9/9] added and subtracted one in computation for jbm splitting to make more similar to jbmtrim --- ivas_processing_scripts/processing/processing.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/ivas_processing_scripts/processing/processing.py b/ivas_processing_scripts/processing/processing.py index a48c7fb6..b8fda56b 100755 --- a/ivas_processing_scripts/processing/processing.py +++ b/ivas_processing_scripts/processing/processing.py @@ -204,7 +204,7 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, lo playTimeScale = 1000 # in ms new_splits = [None] * (len(splits) + 1) - split_start = 0 + split_start = 1 / float(fs) i = 0 lastRtpTs = 0 lastPlayTime = 0 @@ -225,7 +225,8 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, lo playTimeAbsErr = rtpTsRelErr * (entry[3] - lastPlayTime) / playTimeScale # found one split, save in list and search for next new_splits[i] = entry[3] / playTimeScale - playTimeAbsErr - split_start = float(splits[i]) / float(fs) + # get next split marker; add one to make computation more similar to jbmtrim + split_start = (float(splits[i]) + 1) / float(fs) i += 1 if i >= len(new_splits): break @@ -243,7 +244,8 @@ def concat_teardown(x, splits, out_fmt, fs, in_fs, meta, tracefile, ivas_jbm, lo # set new values and use new sampling rate splits = new_splits for s in range(len(splits)): - splits[s] = int(np.floor(splits[s] * float(in_fs))) + # subtract one again (was only used to make computation more similar to jbmtrim) + splits[s] = int(np.floor(splits[s] * float(in_fs))) - 1 # check if last split ending coincides with last sample of signal if splits[-1] > len(x): -- GitLab
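
For reference, the heart of the IVAS JBM splitting support added in patches 2 and 9 is a remapping step: the nominal concatenation split points (sample indices in the coder input) are translated into positions in the JBM decoder output by walking the playout times recorded in the decoder trace file. Below is a minimal, self-contained Python sketch of that remapping, written against the concat_teardown() changes above and the jbmtrim.cpp logic they reference. The function name, the single sampling-rate argument, and the trace-file column layout (column 1 = RTP timestamp, column 3 = playout time, semicolon-separated as read with np.genfromtxt in patch 2) are assumptions for illustration only; the sketch also leaves out the one-sample jbmtrim adjustment from patch 9 and guards the split index where the repository code relies on the trace ending first.

import sys

import numpy as np


def map_splits_through_trace(splits, trace, fs, rtp_time_scale=1000.0, play_time_scale=1000.0):
    """Map nominal split end points (samples) to playout-based sample positions.

    splits: cumulative item end points in samples before JBM processing
    trace:  decoder trace as a 2D array; column 1 = rtpTs, column 3 = playTime (assumed layout)
    fs:     sampling rate of the decoded signal (the repository distinguishes input and output rates)
    """
    new_splits = [None] * (len(splits) + 1)  # mapped item start plus one entry per split marker
    split_start = 0.0                        # next cut position in seconds on the RTP time axis
    i = 0
    last_rtp_ts = 0.0
    last_play_time = 0.0

    for entry in trace:
        rtp_ts, play_time = entry[1], entry[3]
        # ignore frames with unknown RTP time stamp or playout time
        if rtp_ts == -1 or play_time < 0:
            continue
        if rtp_ts / rtp_time_scale >= split_start:
            # interpolate between the current and previous RTP time stamp to stay
            # accurate during DTX, where many time stamps are missing from the trace
            num = rtp_ts / rtp_time_scale - split_start
            rel_err = 0.0 if num == 0 else num / ((rtp_ts - last_rtp_ts) / rtp_time_scale + sys.float_info.epsilon)
            abs_err = rel_err * (play_time - last_play_time) / play_time_scale
            new_splits[i] = play_time / play_time_scale - abs_err
            if i < len(splits):
                split_start = splits[i] / float(fs)  # schedule the next cut position
            i += 1
            if i >= len(new_splits):
                break
        last_rtp_ts, last_play_time = rtp_ts, play_time

    if i < len(new_splits) - 1:
        raise ValueError("not all split points could be mapped through the JBM trace")
    if i < len(new_splits):
        # trace ended before the last split: close the final item at the last playout time
        new_splits[i] = last_play_time / play_time_scale

    # convert mapped playout times (seconds) back to sample indices
    return [int(np.floor(t * fs)) for t in new_splits]


# Example with hypothetical file names and values:
#   trace = np.genfromtxt("item.tracefile.csv", delimiter=";")
#   sample_splits = map_splits_through_trace([960000, 1920000], trace, fs=48000)
#   first_item = decoded_signal[sample_splits[0]:sample_splits[1], :]

Interpolating between adjacent RTP time stamps is what keeps the mapping usable under DTX, where long stretches of the trace carry no valid time stamps; without it the cut points would snap to the next active frame.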