
Commit cabc3a2

Sakib Rahman and claude committed
Fix RESULTS_BASE to respect job-specific variable overrides
Move the RESULTS_BASE calculation from the global variables block to runtime in each job template's script section. This ensures RESULTS_BASE is calculated using the actual DETECTOR_CONFIG and other variables after job-specific overrides are applied. Previously, RESULTS_BASE was evaluated once globally with the default DETECTOR_CONFIG value and never recalculated when jobs overrode DETECTOR_CONFIG.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <[email protected]>
1 parent 1dcfa9b commit cabc3a2
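As a rough illustration of the problem the commit message describes — a minimal sketch with a hypothetical job name and override value that are not part of this commit — the old layout looked like this:

# Old layout (sketch): RESULTS_BASE defined once in the global variables block.
variables:
  CONTAINER_NAME: "some_container"   # placeholder; the real value is defined elsewhere in the file
  IMAGE_TAG: "nightly"
  DETECTOR_CONFIG: "epic_craterlake"
  DETECTOR_VERSION: "main"
  RESULTS_BASE: "results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"

timings_other_detector:              # hypothetical job, not in the commit
  extends: .timings
  variables:
    DETECTOR_CONFIG: "epic_other"    # hypothetical job-level override
  # Per the commit message, RESULTS_BASE had already been expanded with the
  # default epic_craterlake value and was not recalculated for this override,
  # so the job ended up using the wrong results path.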

File tree

1 file changed: +5 -1 lines changed


.gitlab-ci.yml

Lines changed: 5 additions & 1 deletion
@@ -3,7 +3,6 @@ variables:
   IMAGE_TAG: "nightly"
   DETECTOR_CONFIG: "epic_craterlake"
   DETECTOR_VERSION: "main"
-  RESULTS_BASE: "results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
 
 image: eicweb.phy.anl.gov:4567/containers/eic_container/${CONTAINER_NAME}:${IMAGE_TAG}
 
@@ -25,6 +24,7 @@ stages:
 .nevents:
   stage: nevents
   script:
+    - export RESULTS_BASE="results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
     - mkdir -p $(dirname ${RESULTS_BASE}/datasets/glob/$DATA)
     - grep -v "^\#" $DATA | parallel scripts/glob.sh ${RESULTS_BASE}/datasets/glob/$DATA {}
     - sort -o ${RESULTS_BASE}/datasets/glob/$DATA ${RESULTS_BASE}/datasets/glob/$DATA
@@ -36,8 +36,10 @@ stages:
 .timings:
   stage: timings
   script:
+    - export RESULTS_BASE="results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
     - echo "DEBUG - DETECTOR_CONFIG from CI = ${DETECTOR_CONFIG}"
     - echo "DEBUG - DETECTOR_VERSION from CI = ${DETECTOR_VERSION}"
+    - echo "DEBUG - RESULTS_BASE = ${RESULTS_BASE}"
     - mkdir -p $(dirname ${RESULTS_BASE}/datasets/timings/$DATA)
     # Use sed '1!d1' instead of head -n 1 to avoid pipefail issues
     - grep -v "^\#" ${RESULTS_BASE}/datasets/nevents/$DATA | sed '1!d' | parallel scripts/determine_timing.sh ${RESULTS_BASE}/datasets/timings/$DATA {}
@@ -50,13 +52,15 @@ stages:
 .timings_all:
   stage: timings
   script:
+    - export RESULTS_BASE="results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
     - mkdir -p $(dirname ${RESULTS_BASE}/datasets/timings/$DATA)
     - grep -v "^\#" ${RESULTS_BASE}/datasets/nevents/$DATA | parallel scripts/determine_timing.sh ${RESULTS_BASE}/datasets/timings/$DATA {}
     - sort -o ${RESULTS_BASE}/datasets/timings/$DATA ${RESULTS_BASE}/datasets/timings/$DATA
 
 .collect:
   stage: collect
   script:
+    - export RESULTS_BASE="results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
     - rm -rf results/logs/
     - find ${RESULTS_BASE}/datasets/
     - find ${RESULTS_BASE}/datasets/timings/ -name "*.csv" -print0 -exec awk 'BEGIN {FS=","} {sum+=$3*$5+$4} END {print(":",sum/3600,"core-hours")}' {} \;
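After this change, the path is computed inside each job's script, after job-level variable overrides have been applied, so a job that overrides DETECTOR_CONFIG gets a matching RESULTS_BASE. A minimal usage sketch — the job name, override value, and DATA file are hypothetical, not taken from this commit:

timings_epic_other:
  extends: .timings
  variables:
    DETECTOR_CONFIG: "epic_other"    # hypothetical override
    DATA: "datasets/list.txt"        # hypothetical dataset list consumed by the template
  # The template's script now begins with
  #   export RESULTS_BASE="results/${CONTAINER_NAME}/${IMAGE_TAG}/${DETECTOR_CONFIG}/${DETECTOR_VERSION}"
  # so outputs land under results/<container>/nightly/epic_other/main/ rather
  # than under the default epic_craterlake path.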
