Skip to content

Commit f1b0b1e

Browse files
Add 50-year repeat-year chain workflow for glorysv12-curvilinear
Submits 50 sequential 1-year SLURM jobs (Jul 2002–Jul 2003), each resetting the calendar to avoid leap-year drift. A pickup-to-init converter extracts the final state from each run to seed the next. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent e9e7030 commit f1b0b1e

3 files changed

Lines changed: 302 additions & 0 deletions

File tree

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
#!/bin/bash
# repeat_year_chain.sh — Submit 50 chained repeat-year simulations.
#
# Each simulation runs July 1, 2002 → July 1, 2003 (365 days, nIter0=0).
# After each run completes, the next job converts its pickup file to
# initial conditions and launches a fresh 1-year run.
#
# Usage:
#   cd simulations/glorysv12-curvilinear
#   bash workflows/repeat_year_chain.sh [--dry-run]
#
# Requirements:
#   - The MITgcm data namelist must have nIter0=0, endTime=31536000.0
#   - The data.cal must have startDate_1=20020701
#   - EXF/OBC forcing files must cover the full Jul 2002–Jul 2003 period

set -euo pipefail

SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
SIMULATION_DIR=$(dirname "$SCRIPT_DIR")

readonly N_RUNS=50
readonly EXPERIMENT="repeat-year-50"
DRY_RUN=false

if [[ "${1:-}" == "--dry-run" ]]; then
  DRY_RUN=true
  echo "[DRY RUN] Will print sbatch commands without submitting."
fi

EXPERIMENT_DIR="${SIMULATION_DIR}/${EXPERIMENT}"
mkdir -p "${EXPERIMENT_DIR}"

echo "======================================="
echo " Repeat-year chain: ${N_RUNS} runs"
echo " Experiment dir: ${EXPERIMENT_DIR}"
echo "======================================="

PREV_JOB_ID=""

# C-style loop instead of $(seq ...) — no subprocess, same range.
for (( i = 1; i <= N_RUNS; i++ )); do
  RUN_NUM=$(printf '%03d' "$i")
  PREV_RUN_NUM=""
  if (( i > 1 )); then
    PREV_RUN_NUM=$(printf '%03d' "$((i - 1))")
  fi

  SBATCH_ARGS=(
    -n64
    -c1
    --time=3-00:00:00
    --nodelist=noether
    --job-name="repeat_yr_${RUN_NUM}"
    --output="${EXPERIMENT_DIR}/${RUN_NUM}-%A.out"
    --error="${EXPERIMENT_DIR}/${RUN_NUM}-%A.out"
    --chdir="${SIMULATION_DIR}"
    --export="ALL,RUN_NUM=${RUN_NUM},PREV_RUN_NUM=${PREV_RUN_NUM},EXPERIMENT=${EXPERIMENT},SIMULATION_DIR=${SIMULATION_DIR}"
  )

  # Chain each run behind the previous one; afterok only releases the job
  # when the predecessor exits successfully, so a failed year stops the chain.
  if [[ -n "$PREV_JOB_ID" ]]; then
    SBATCH_ARGS+=(--dependency="afterok:${PREV_JOB_ID}")
  fi

  if $DRY_RUN; then
    echo "  [${RUN_NUM}] sbatch ${SBATCH_ARGS[*]} ${SCRIPT_DIR}/repeat_year_run.sh"
    PREV_JOB_ID="FAKE_${RUN_NUM}"
  else
    # sbatch prints "Submitted batch job <id>"; keep only the trailing id.
    JOB_ID=$(sbatch "${SBATCH_ARGS[@]}" "${SCRIPT_DIR}/repeat_year_run.sh" | awk '{print $NF}')
    echo "  Submitted run ${RUN_NUM}: SLURM job ${JOB_ID}"
    PREV_JOB_ID=$JOB_ID
  fi
done

echo ""
echo "All ${N_RUNS} jobs submitted."
if ! $DRY_RUN; then
  # squeue --name takes a comma-separated list of exact names (no globs),
  # so filter by user and grep the job-name column instead.
  echo "Monitor with: squeue -u \$USER | grep repeat_yr"
fi
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
#!/bin/bash
# repeat_year_run.sh — SLURM job script for one year of a repeat-year chain.
#
# Expected environment variables (set by repeat_year_chain.sh via --export):
#   RUN_NUM        — zero-padded run number (001–050)
#   EXPERIMENT     — experiment subdirectory name (e.g. repeat-year-50)
#   SIMULATION_DIR — absolute path to simulations/glorysv12-curvilinear
#   PREV_RUN_NUM   — previous run number (empty for run 001)
#
# The script:
#   1. Creates the run directory with symlinks to input/
#   2. For runs > 001: converts the previous run's pickup to init files
#   3. Launches MITgcm (64-rank MPI)

set -euo pipefail

# Fail fast with a clear diagnostic if the chain script did not export these;
# a bare `set -u` failure deep in the script would be much harder to read.
: "${RUN_NUM:?RUN_NUM must be exported by repeat_year_chain.sh}"
: "${EXPERIMENT:?EXPERIMENT must be exported by repeat_year_chain.sh}"
: "${SIMULATION_DIR:?SIMULATION_DIR must be exported by repeat_year_chain.sh}"

SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
# env.sh is expected to define MITGCM_BASE_IMG and SPECTRE_UTILS_IMG.
source "$SCRIPT_DIR/env.sh"

REPO_ROOT=$(readlink -f "$SIMULATION_DIR/../..")
RUN_DIR="${EXPERIMENT}/${RUN_NUM}"
# Iteration number of the final pickup written after one model year.
# NOTE(review): 87600 steps/year implies a 360 s timestep — confirm against
# deltaT in the `data` namelist before changing the calendar setup.
ITER_FINAL="0000087600"

echo "======================================="
echo ""
echo " Repeat-year experiment : ${EXPERIMENT}"
echo " Run number : ${RUN_NUM}"
echo " Run directory : ${RUN_DIR}"
echo " Simulation directory : ${SIMULATION_DIR}"
echo " MITgcm image : ${MITGCM_BASE_IMG}"
echo " SLURM Job ID : ${SLURM_JOB_ID:-none}"
echo ""
echo "======================================="

###############################################################################
# Step 1: Set up run directory — symlink all input files
###############################################################################
echo "--- Setting up run directory ---"
srun --ntasks=1 \
  --mpi=pmix \
  --container-image="$MITGCM_BASE_IMG" \
  --container-mounts="$SIMULATION_DIR:/workspace:rw" \
  --container-env=RUN_DIR \
  /bin/bash -c "mkdir -p /workspace/$RUN_DIR && ln -sf /workspace/input/* /workspace/$RUN_DIR/"

echo " > Symlinks created."

###############################################################################
# Step 2: For runs after 001, convert previous pickup to init files
###############################################################################
if [[ -n "${PREV_RUN_NUM:-}" ]]; then
  PREV_DIR="${EXPERIMENT}/${PREV_RUN_NUM}"
  PICKUP_PREFIX="${PREV_DIR}/pickup.${ITER_FINAL}"

  echo "--- Converting pickup from run ${PREV_RUN_NUM} ---"
  echo " Pickup: ${PICKUP_PREFIX}"
  echo " Output: ${RUN_DIR}/"

  # Mount the repo root (not just the simulation dir) so the converter can
  # be invoked by its in-repo path under spectre_utils/.
  srun --ntasks=1 \
    --mpi=pmix \
    --container-image="$SPECTRE_UTILS_IMG" \
    --container-mounts="${REPO_ROOT}:/repo:rw" \
    /bin/bash -c "cd /repo && python spectre_utils/pickup_to_init.py \
      simulations/glorysv12-curvilinear/${PICKUP_PREFIX} \
      simulations/glorysv12-curvilinear/${RUN_DIR}/ \
      --nx 768 --ny 424 --nr 50"

  echo " > Init files created from pickup."
fi

###############################################################################
# Step 3: Launch MITgcm
###############################################################################
echo "--- Launching MITgcm ---"
# No --ntasks here: the full SLURM allocation (64 ranks) runs the model.
srun --mpi=pmix \
  --cpu-bind=cores \
  --container-image="$MITGCM_BASE_IMG" \
  --container-mounts="$SIMULATION_DIR:/workspace:rw" \
  --container-env=RUN_DIR \
  /bin/bash -c "source /opt/spack-environment/activate.sh && cd /workspace/$RUN_DIR && /workspace/exe/mitgcmuv"

echo "--- Run ${RUN_NUM} complete ---"

spectre_utils/pickup_to_init.py

Lines changed: 142 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
#!/usr/bin/env python3
2+
"""Convert an MITgcm binary pickup file to individual init files.
3+
4+
Reads the pickup.<iter>.data/.meta pair and writes:
5+
T.init.bin, S.init.bin, U.init.bin, V.init.bin, Eta.init.bin
6+
7+
The pickup is float64 (MITgcm default for checkpoints); init files are
8+
written as float32 (matching readBinaryPrec=32 in data PARM01).
9+
10+
Usage:
11+
python pickup_to_init.py <pickup_prefix> <output_dir> [--nx 768] [--ny 424] [--nr 50]
12+
13+
Example:
14+
python pickup_to_init.py repeat-year-50/001/pickup.0000087600 repeat-year-50/002/
15+
"""
16+
17+
import argparse
18+
import re
19+
import sys
20+
from pathlib import Path
21+
22+
import numpy as np
23+
24+
25+
def parse_pickup_meta(meta_path: Path) -> dict:
    """Parse a MITgcm ``.meta`` file and return dims, precision, and field list.

    Args:
        meta_path: Path to the ``pickup.<iter>.meta`` sidecar file.

    Returns:
        Dict with keys ``nx`` and ``ny`` (global horizontal sizes from
        dimList), ``dtype`` (numpy dtype of the ``.data`` payload),
        ``nrecords`` (total 2-D record count, or None if absent), and
        ``fields`` (field names in on-disk order).

    Raises:
        ValueError: if dimList or fldList cannot be parsed, or dimList
            describes fewer than two dimensions.
    """
    text = meta_path.read_text()

    # dimList is a flat sequence of (size, start, end) triples, one triple
    # per dimension, so dims[0] is the global X size and dims[3] the global
    # Y size.
    dim_match = re.search(r"dimList\s*=\s*\[\s*([\d\s,]+)\]", text)
    if not dim_match:
        raise ValueError(f"Cannot parse dimList from {meta_path}")
    dims = [int(x) for x in dim_match.group(1).replace(",", " ").split()]
    if len(dims) < 4:
        # Without this guard, dims[3] below raises a bare IndexError that
        # gives no hint about the malformed meta file.
        raise ValueError(f"dimList in {meta_path} has fewer than 2 dimensions: {dims}")
    nx, ny = dims[0], dims[3]

    # dataprec names the on-disk precision (e.g. 'float64'); default to
    # float32 only if the entry is missing or does not mention 64.
    prec_match = re.search(r"dataprec\s*=\s*\[\s*'(\w+)'\s*\]", text)
    dtype = np.float64 if prec_match and "64" in prec_match.group(1) else np.float32

    # nrecords is the total number of 2-D slabs in the .data file (optional).
    nrec_match = re.search(r"nrecords\s*=\s*\[\s*(\d+)\s*\]", text)
    nrecords = int(nrec_match.group(1)) if nrec_match else None

    # fldList contains fixed-width quoted names, e.g. 'Uvel    '; the regex
    # strips the padding.
    fld_match = re.search(r"fldList\s*=\s*\{([^}]+)\}", text)
    if not fld_match:
        raise ValueError(f"Cannot parse fldList from {meta_path}")
    fields = re.findall(r"'(\w+)\s*'", fld_match.group(1))

    return {"nx": nx, "ny": ny, "dtype": dtype, "nrecords": nrecords, "fields": fields}
51+
52+
53+
def pickup_to_init(pickup_prefix: str, output_dir: str, nx: int, ny: int, nr: int):
    """Read a pickup file and write individual init ``.bin`` files.

    Extracts U/V/T/S/Eta from the ``pickup.<iter>.data`` payload and writes
    them as float32 init files into ``output_dir`` (created if needed).

    Args:
        pickup_prefix: Path prefix of the pickup pair (without .data/.meta).
        output_dir: Directory to receive the init files.
        nx, ny, nr: Expected grid dimensions (X, Y, Z).

    Raises:
        FileNotFoundError: if either the .meta or .data file is missing.
        ValueError: if the meta grid disagrees with nx/ny, or the .data
            file is too short for a requested field.
    """
    meta_path = Path(pickup_prefix + ".meta")
    data_path = Path(pickup_prefix + ".data")
    out = Path(output_dir)

    if not meta_path.exists():
        raise FileNotFoundError(f"Meta file not found: {meta_path}")
    if not data_path.exists():
        raise FileNotFoundError(f"Data file not found: {data_path}")

    meta = parse_pickup_meta(meta_path)
    # A grid mismatch would compute record offsets against the wrong slab
    # size and silently emit corrupt init files — fail loudly instead.
    if (meta["nx"], meta["ny"]) != (nx, ny):
        raise ValueError(
            f"Grid mismatch: meta says {meta['nx']} x {meta['ny']}, "
            f"arguments say {nx} x {ny}"
        )
    dtype = meta["dtype"]
    fields = meta["fields"]
    rec_size = nx * ny

    print(f"Pickup: {data_path}")
    print(f" Grid: {nx} x {ny} x {nr}")
    print(f" Precision: {dtype}")
    print(f" Fields: {fields}")
    print(f" Total records: {meta['nrecords']}")

    # Map pickup field names to init file names and their depth (nr for 3D, 1 for 2D)
    field_map = {
        "Uvel": ("U.init.bin", nr),
        "Vvel": ("V.init.bin", nr),
        "Theta": ("T.init.bin", nr),
        "Salt": ("S.init.bin", nr),
        "EtaN": ("Eta.init.bin", 1),
    }

    # Compute byte offsets for each field in the pickup
    bytes_per_val = np.dtype(dtype).itemsize
    rec_bytes = rec_size * bytes_per_val

    # Build offset table by walking the fields in on-disk order: known 2-D
    # surface fields occupy one record, everything else nr records.
    offsets = {}
    current_rec = 0
    for fld in fields:
        nlevels = 1 if fld in ("EtaN", "dEtaHdt", "EtaH") else nr
        offsets[fld] = (current_rec, nlevels)
        current_rec += nlevels

    print(f" Computed record layout: {offsets}")

    # Read and write the fields we need
    out.mkdir(parents=True, exist_ok=True)
    with open(data_path, "rb") as f:
        for fld_name, (init_name, nlevels) in field_map.items():
            if fld_name not in offsets:
                print(f" WARNING: field '{fld_name}' not found in pickup, skipping")
                continue

            start_rec, _ = offsets[fld_name]

            # Seek to the field's first record and read its full slab.
            f.seek(start_rec * rec_bytes)
            data = np.fromfile(f, dtype=dtype, count=rec_size * nlevels)
            # np.fromfile returns a short array (no error) on truncated
            # input; detect it here instead of letting reshape fail obscurely.
            if data.size != rec_size * nlevels:
                raise ValueError(
                    f"Truncated pickup: expected {rec_size * nlevels} values "
                    f"for {fld_name}, read {data.size}"
                )
            data = data.reshape((nlevels, ny, nx))

            # Convert to float32 for init files (matches readBinaryPrec=32).
            init_path = out / init_name
            data.astype(np.float32).tofile(init_path)
            size_mb = init_path.stat().st_size / 1e6
            print(f" Wrote {init_path} ({size_mb:.1f} MB)")

    print("Done.")
127+
128+
129+
def main():
    """CLI entry point: parse grid/path arguments and run the conversion."""
    parser = argparse.ArgumentParser(description="Convert MITgcm pickup to init files")
    parser.add_argument("pickup_prefix", help="Path prefix (without .data/.meta)")
    parser.add_argument("output_dir", help="Directory to write init files")
    parser.add_argument("--nx", type=int, default=768, help="Grid points in X")
    parser.add_argument("--ny", type=int, default=424, help="Grid points in Y")
    parser.add_argument("--nr", type=int, default=50, help="Grid points in Z")
    ns = parser.parse_args()

    pickup_to_init(ns.pickup_prefix, ns.output_dir, ns.nx, ns.ny, ns.nr)


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)