Commit f55e8847 authored by BOHMRR's avatar BOHMRR
Browse files

pytest: change default options (add '-n auto', remove '-v')

parent 1ae3e1d6
Loading
Loading
Loading
Loading
+8 −8
Original line number Diff line number Diff line
@@ -315,13 +315,13 @@ pytest-on-merge-request:
    - mv IVAS_cod_test IVAS_cod
    - mv IVAS_dec_test IVAS_dec
    # create references
    - python3 -m pytest tests -n auto --update_ref 1 -m create_ref
    - python3 -m pytest tests -n auto --update_ref 1 -m create_ref_part2
    - python3 -m pytest tests/test_param_file.py -n auto --update_ref 1 -m create_ref --param_file scripts/config/self_test_evs.prm
    - python3 -m pytest tests -v --update_ref 1 -m create_ref
    - python3 -m pytest tests -v --update_ref 1 -m create_ref_part2
    - python3 -m pytest tests/test_param_file.py -v --update_ref 1 -m create_ref --param_file scripts/config/self_test_evs.prm

    ### run pytest
    - exit_code=0
    - python3 -m pytest tests -n auto --junit-xml=report-junit.xml || exit_code=$?
    - python3 -m pytest tests -v --junit-xml=report-junit.xml || exit_code=$?
    - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true

    - if [ $zero_errors != 1 ]; then echo "Run errors in pytest"; fail_1=1; fi
@@ -331,7 +331,7 @@ pytest-on-merge-request:

    ### run pytest for EVS cases
    - exit_code=0
    - python3 -m pytest tests/test_param_file.py -n auto --param_file scripts/config/self_test_evs.prm --junit-xml=report-junit-evs.xml || exit_code=$?
    - python3 -m pytest tests/test_param_file.py -v --param_file scripts/config/self_test_evs.prm --junit-xml=report-junit-evs.xml || exit_code=$?
    - zero_errors=$(cat report-junit-evs.xml | grep -c 'errors="0"') || true

    - if [ $zero_errors != 1 ]; then echo "Run errors in pytest for EVS"; fail_2=1; fi
@@ -444,12 +444,12 @@ codec-comparison-on-main-push:
    - mv IVAS_cod_test IVAS_cod
    - mv IVAS_dec_test IVAS_dec
    # create references
    - python3 -m pytest tests -n auto --update_ref 1 -m create_ref
    - python3 -m pytest tests -n auto --update_ref 1 -m create_ref_part2
    - python3 -m pytest tests -v --update_ref 1 -m create_ref
    - python3 -m pytest tests -v --update_ref 1 -m create_ref_part2

    ### run pytest
    - exit_code=0
    - python3 -m pytest tests -n auto --junit-xml=report-junit.xml || exit_code=$?
    - python3 -m pytest tests -v --junit-xml=report-junit.xml || exit_code=$?
    - if [ $exit_code -eq 1 ] && [ $non_be_flag == 0 ]; then echo "pytest run had failures and non-BE flag not present"; exit $EXIT_CODE_FAIL; fi
    - zero_errors=$(cat report-junit.xml | grep -c 'errors="0"') || true
    - if [ $exit_code -eq 1 ] && [ $zero_errors == 1 ]; then echo "pytest run had failures, but no errors and non-BE flag present"; exit $EXIT_CODE_NON_BE; fi
+1 −1
Original line number Diff line number Diff line
# pytest.ini
# note: per convention, this file is placed in the root directory of the repository
[pytest]
addopts = -ra --tb=short --basetemp=./tmp -v
addopts = -ra --tb=short --basetemp=./tmp -n auto
# Write captured system-out log messages to JUnit report.
junit_logging = system-out
# Do not capture log information for passing tests to JUnit report.
+6 −6
Original line number Diff line number Diff line
@@ -62,8 +62,8 @@ When different test binaries are to be used, they can be specified via the `--du
# - IVAS_cod_ref(.exe)
# - IVAS_dec_ref(.exe)
# pytest command lines to be executed from project root folder:
pytest tests -n auto --update_ref 1 -m create_ref
pytest tests -n auto --update_ref 1 -m create_ref_part2
pytest tests --update_ref 1 -m create_ref
pytest tests --update_ref 1 -m create_ref_part2
```

## Running the tests
@@ -72,7 +72,7 @@ To run all tests from the tests folder:

```bash
# pytest command line to be executed from project root folder:
pytest tests -n auto
pytest tests
```

## Re-running some tests
@@ -81,7 +81,7 @@ When there are test failures, you may want to run, after having fixed the code,

```bash
# rerun only the tests that failed at the last run
pytest tests -n auto --last-failed
pytest tests --last-failed
```

To run a specific test case, you can e.g. pick a test case from the `short test summary info` and use that test case as an argument to `pytest`. E.g.
@@ -104,9 +104,9 @@ pytest tests/test_sba_bs_dec_plc.py::test_sba_plc_system

When there are many test failures, you can use the `-x` (or `--exitfirst`) option to stop testing on the first failure.

Commonly used options like `-n auto` can be added to addopts within the [pytest] section in `pytest.ini`. This saves some typing when calling `pytest`.
Commonly used options like `-n auto` are added to addopts within the [pytest] section in `pytest.ini`. This saves some typing when calling `pytest`.

The `-v` (or `--verbose`) option is usually helpful to see what is going on. Therefore, `-v` is currently part of addopts in `pytest.ini`. If you don't like this verbosity, you can specify the `-q` (`--quiet`) option when running `pytest`.
The `-v` (or `--verbose`) option is helpful to see what is going on.

## Custom options