Commit 92a8417c authored by kinuthia
Browse files

remove job from .pre stage

parent b4255d5d
Loading
Loading
Loading
Loading
Loading
+1 −27
Original line number Diff line number Diff line
@@ -295,11 +295,10 @@ workflow:

    # Aggregate job logs
    - if [ $GET_LOGS -eq 1 ]; then
      - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines"

    - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID)
    - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous"
    #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip
    - curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip
    - unzip artifacts.zip -d previous_artifacts
    # This wildcard thingy relies on only one csv file being present per job
    - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv"
@@ -1662,8 +1661,6 @@ ivas-long-term-job-logs:
    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job
  tags:
    - ivas-linux
  timeout: "20 minutes"
  stage: .pre
  before_script:
    - USE_LTV=1
    - DUT_DECODER_PATH=./$REF_DECODER_PATH
@@ -1673,29 +1670,6 @@ ivas-long-term-job-logs:
    - GET_LOGS=1
  <<: *ivas-pytest-anchor

long-term-job-logs:
  rules:
    - if: $CI_PIPELINE_SOURCE == 'web' && $MANUAL_PIPELINE_TYPE == "long-term-logs" # change this to a scheduled job
  stage: .pre
  timeout: "20 minutes"
  script:
  - curl --request GET "https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines"

  - id_previous=$(python3 ci/get_id_of_last_job_occurence.py $CI_DEFAULT_BRANCH ivas-pytest-compare_ref-long-enc $CI_PROJECT_ID)
  - echo "Job ID from variables - ivas-pytest-compare_ref-long-enc, Job ID from script - $id_previous"
  #- curl --request GET "https://forge.3gpp.org/rep/api/v4/projects/$CI_PROJECT_ID/jobs/$id_previous/artifacts" --output artifacts.zip
  - unzip artifacts.zip -d previous_artifacts
  # This wildcard thingy relies on only one csv file being present per job
  - ivas-pytest-compare_ref-long-enc_csv="previous_artifacts/mld--ivas-pytest-compare_ref-long-enc-$id_previous--sha-*.csv"
  - echo $ivas-pytest-compare_ref-long-enc_csv
  #- python3 ci/basop-pages/create_report_pages.py $PAGES_HTML_ARTIFACT_NAME $MERGED_CSV_ARTIFACT_NAME $CSV_ARTIFACT_NAME $file_previous $CI_JOB_ID $id_previous $CI_JOB_NAME
  - TODAY=$(date +'%Y-%m-%d')
  # create log if it doesn't exist
  - mkdir -p logs/$TODAY
  - mv ivas-pytest-compare_ref-long-enc_csv logs/$TODAY
  - ls
  - ls logs