Commit 74aa8c4b authored by Archit Tamarapu's avatar Archit Tamarapu
Browse files

Merge branch 'update-python-requirements' into 'main'

Update required python and package versions

See merge request !234
parents 29caa04c c70c9194
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -36,7 +36,7 @@ the United Nations Convention on Contracts on the International Sales of Goods.

## Requirements

- Python >= 3.9
- Python 3.13.7 (may work with lower Python versions, but this is not guaranteed)
- numpy, scipy and pyyaml ([see requirements.txt](./requirements.txt))

---
+27 −9
Original line number Diff line number Diff line
@@ -4,7 +4,6 @@ import re
import shutil
from pathlib import Path


# table 5 from IVAS-8b
TABLE_5 = {
    "s01": {
@@ -66,7 +65,9 @@ def repo_root_dir() -> Path:
def copy_final_items(verbose: bool = False):
    files_copied = 0

    for exp_dir in map(Path, glob.glob(str(repo_root_dir() / "experiments/characterization/P800-*/"))):
    for exp_dir in map(
        Path, glob.glob(str(repo_root_dir() / "experiments/characterization/P800-*/"))
    ):
        exp_dir = Path(exp_dir)

        final_dir = exp_dir / "proc_final"
@@ -76,28 +77,43 @@ def copy_final_items(verbose: bool = False):

        for sample, categories_map in TABLE_5.items():
            for category, loudness in categories_map.items():
                item_dir_pattern = proc_output / glob.escape(category) / f"out_{glob.escape(str(loudness))}LKFS" / "c*"
                item_dir_pattern = (
                    proc_output
                    / glob.escape(category)
                    / f"out_{glob.escape(str(loudness))}LKFS"
                    / "c*"
                )
                item_filename_pattern = f"*{glob.escape(sample)}.c*.wav"

                items_to_copy = list(map(Path, glob.glob(str(item_dir_pattern / item_filename_pattern))))
                items_to_copy = list(
                    map(Path, glob.glob(str(item_dir_pattern / item_filename_pattern)))
                )

                if not items_to_copy:
                    if verbose:
                        print(f"Could not find processed item for {sample}, {category}, {loudness}LKFS, skipping")
                        print(
                            f"Could not find processed item for {sample}, {category}, {loudness}LKFS, skipping"
                        )
                    continue

                for item_to_copy in items_to_copy:
                    match = re.match(r".*/c(\d+)/[^/]*s\d+\.c\d+\.wav$", str(item_to_copy.as_posix()))
                    match = re.match(
                        r".*/c(\d+)/[^/]*s\d+\.c\d+\.wav$", str(item_to_copy.as_posix())
                    )
                    assert match, item_to_copy
                    condition = match.group(1)

                    item_target_dir = final_dir / f"c{condition}"
                    item_target_dir.mkdir(parents=True, exist_ok=True)

                    print(f"Copying {item_to_copy.relative_to(Path.cwd())} to {item_target_dir.relative_to(Path.cwd())}")
                    print(
                        f"Copying {item_to_copy.relative_to(Path.cwd())} to {item_target_dir.relative_to(Path.cwd())}"
                    )

                    if (item_target_dir / item_to_copy.name).is_file():
                        print(f"WARNING: item {item_to_copy.name} already exists in target dir, overwriting")
                        print(
                            f"WARNING: item {item_to_copy.name} already exists in target dir, overwriting"
                        )

                    shutil.copy(item_to_copy, item_target_dir)

@@ -107,7 +123,9 @@ def copy_final_items(verbose: bool = False):


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Copy samples into proc_final/cXX directories for P.800 experiments.")
    parser = argparse.ArgumentParser(
        description="Copy samples into proc_final/cXX directories for P.800 experiments."
    )
    parser.add_argument("-v", "--verbose", action="store_true")

    parsed = parser.parse_args()
+10 −3
Original line number Diff line number Diff line
@@ -65,7 +65,9 @@ def copy_preliminaries(root_dir, config_yaml):
                    print(f"  Copying {file} -> {out_dir}")

                    if (out_dir / Path(file).name).is_file():
                        raise FileExistsError(f"File {file} already exists in {out_dir}")
                        raise FileExistsError(
                            f"File {file} already exists in {out_dir}"
                        )

                    shutil.copy(file, out_dir)

@@ -73,8 +75,13 @@ def copy_preliminaries(root_dir, config_yaml):


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Copy preliminaries into proc_final/preliminaries directory for P.800 experiments.")
    parser = argparse.ArgumentParser(
        description="Copy preliminaries into proc_final/preliminaries directory for P.800 experiments."
    )

    parsed = parser.parse_args()

    copy_preliminaries(repo_root_dir() / "experiments" / "characterization", this_dir() / "p800_preliminaries.yml")
    copy_preliminaries(
        repo_root_dir() / "experiments" / "characterization",
        this_dir() / "p800_preliminaries.yml",
    )
+2 −2
Original line number Diff line number Diff line
@@ -28,9 +28,9 @@
#   the United Nations Convention on Contracts on the International Sales of Goods.

import argparse
from pathlib import Path
from hashlib import md5
from collections import Counter
from hashlib import md5
from pathlib import Path


def get_hash_line_for_file(file: Path, output_dir: Path):

pyproject.toml

0 → 100644
+11 −0
Original line number Diff line number Diff line
[project]
name = "ivas-processing-scripts"
version = "2025.09"
dependencies = ["numpy>=2.0.2,<3", "pyyaml>=6.0.2,<7", "scipy>=1.13.1,<2"]
requires-python = "==3.13.7"
maintainers = [
    { name = "Archit Tamarapu", email = "archit.tamarapu@iis.fraunhofer.de" },
]
description = "Scripts to set up and run the IVAS characterization and selection listening tests"
readme = "README.md"
license-files = ["LICENSE.md"]
Loading