# this is a testing/maintenance mechanism to force getting the log history from a specific job id
# see below in the concrete complexity jobs
-if [ -n "$JOB_ID_INJECT" ]; then job_id="$JOB_ID_INJECT"; fi
-curl -sS -X GET "$CI_API_V4_URL/projects/$CI_PROJECT_ID/jobs/$job_id/artifacts" -o artifacts.zip
-unzip -qq artifacts.zip || true  # this may fail on first run, when there are no artifacts there and the zip file is actually just "404"-html
-public_dir="${CI_JOB_NAME}-public"
# the if is needed to catch the case when no artifact is there (first run), similar to the guard above
# 1. check for public_dir being there as this might not be the case when artifact download failed
# 2. check for public dir not being empty - handle job failures in prev job that happen after the dir is created. In that case, the empty dir is in the artifacts
-if [ -d "$public_dir" ] && [ -n "$( ls -A "$public_dir" )" ]; then
-mv "$public_dir"/* wmops/
# check here if we have the split-by-levels files present - if not, fake them up with the existing global one
# this is needed for the first run with split graphs on a branch where the global version did run previously
# NOTE: checking only for level_1 file here as this should already be sufficient
# NOTE2: also not checking for RAM for the same reason