Commit a44322cc authored by kinuthia's avatar kinuthia
Browse files

remove job from anchor and move to .pre stage

parent 92a8417c
Loading
Loading
Loading
Loading
Loading
+61 −1
Original line number Diff line number Diff line
@@ -1661,6 +1661,8 @@ ivas-long-term-job-logs:
    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job
  tags:
    - ivas-linux
  # .pre always runs before every other stage in the pipeline.
  stage: .pre
  timeout: "20 minutes"
  # Shell variables consumed by the script section of this job.
  before_script:
    - USE_LTV=1
    # NOTE(review): resolves relative to the job's working directory — confirm
    # $REF_DECODER_PATH is itself relative, otherwise "./" + absolute path breaks.
    - DUT_DECODER_PATH=./$REF_DECODER_PATH
@@ -1668,7 +1670,65 @@ ivas-long-term-job-logs:
    - LEVEL_SCALING=1.0
    - SPLIT_COMPARISON="false"
    # 1 enables the log-aggregation branch of the script section.
    - GET_LOGS=1
  # NOTE(review): YAML merge keys are shallow and explicit keys in this job
  # override merged-in keys regardless of position, so placing "<<:" here
  # (after before_script) does not change precedence — but confirm the
  # &ivas-pytest-anchor anchor still exists after the "remove job from anchor"
  # change, otherwise this file fails to parse.
  <<: *ivas-pytest-anchor
  variables:
    # keep "mld" in artifact name for backwards compatibility reasons
    CSV_ARTIFACT_NAME: "mld--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv"
    CSV_ARTIFACT_SPLIT: "mld--split--$CI_JOB_NAME-$CI_JOB_ID--sha-$CI_COMMIT_SHORT_SHA.csv"
    MERGED_CSV_ARTIFACT_NAME: "$CI_JOB_NAME--merged_csv--$CI_JOB_ID.csv"
    PAGES_HTML_ARTIFACT_NAME: "$CI_JOB_NAME-index.html"
    SUMMARY_HTML_ARTIFACT_NAME: "summary_$CI_JOB_NAME.html"
    SUMMARY_HTML_ARTIFACT_SPLIT: "summary_split_$CI_JOB_NAME.html"
    IMAGES_ARTIFACT_NAME: "images_$CI_JOB_NAME"
    IMAGES_ARTIFACT_SPLIT: "images_split_$CI_JOB_NAME"
  # Fetches the artifacts of the most recent ivas-pytest-compare_ref-long-enc
  # job on the default branch and files its per-job CSV log under logs/<date>.
  # GitLab concatenates the list items below into one shell script, so the
  # multi-line if/then/fi constructs span several "-" items by design.
  script:
    - !reference [ .job-linux, before_script ]

    - set -euxo pipefail
    - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/print-common-info.sh
    - bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/basop/update-scripts-repo.sh
    # Select the testcase timeout depending on long-term-vector (LTV) usage.
    - if [ $USE_LTV -eq 1 ]; then
    -    bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/update-ltv-repo.sh
    -    bash "${CI_PROJECT_DIR}"/ivas-codec-ci/snippets/copy-ltv-files-to-testv-dir.sh
    -    testcase_timeout=$TESTCASE_TIMEOUT_LTV
    - else
    -    testcase_timeout=$TESTCASE_TIMEOUT_STV
    - fi

    # Aggregate job logs
    - if [ $GET_LOGS -eq 1 ]; then

    - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID)
    - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous"
    # NOTE(review): without --fail, curl succeeds even on HTTP errors and the
    # unzip step is what actually fails — confirm whether that is intended.
    - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip
    - unzip artifacts.zip -d previous_artifacts
    # This wildcard thingy relies on only one csv file being present per job.
    # BUGFIX: shell variable names must not contain hyphens — the previous
    # name "ivas-pytest-compare_ref-long-enc_csv=..." was not a valid
    # assignment and aborted the script under "set -e".
    - compare_ref_long_enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv"
    - echo $compare_ref_long_enc_csv
    #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME
    - TODAY=$(date +'%Y-%m-%d')
    # create log if it doesn't exist
    - mkdir -p logs/$TODAY
    # BUGFIX: dereference the variable ("$"); keep it unquoted so the glob
    # expands to the actual csv filename.
    - mv $compare_ref_long_enc_csv logs/$TODAY
    - ls
    - fi
    - exit 0

  # NOTE(review): the script ends with "exit 0", so 123 can only be produced
  # if a command failing under "set -e" happens to return 123 — confirm which
  # tool is expected to emit that code.
  allow_failure:
    exit_codes:
      - 123
  artifacts:
    name: "$CI_JOB_NAME--sha-$CI_COMMIT_SHORT_SHA--results"
    expire_in: 1 week
    # "always" uploads the logs directory even when the job fails.
    when: always
    paths:
      #- report-junit.xml
      #- report.html
      - logs
    expose_as: "ivas long term job logs results"
    #reports:
    #  junit:
    #    - report-junit.xml