GitHub Actions / Repro Test Results
failed
Apr 21, 2026 in 0s
1 fail, 2 pass in 6m 34s
3 tests 2 ✅ 6m 34s ⏱️
1 suites 0 💤
1 files 1 ❌
Results for commit 53b31e0.
Annotations
github-actions / Repro Test Results
test_repro_historical (test-venv.lib.python3.10.site-packages.model_config_tests.config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 6m 26s]
Raw output
AssertionError: Checksums were not equal. The new checksums have been written to /scratch/tm70/repro-ci/experiments/access-esm1.6-configs/pr713/cherry_pick_from_pr707_into_dev-1pctCO2/92fbd79ad80e958e66c7be0fb6489a406d33fcb9/checksum/historical-24hr-checksum.json.
assert {'output': {'...ion': '1-0-0'} == {'output': {'...ion': '1-0-0'}
Omitting 1 identical items, use -vv to show
Differing items:
{'output': {'Advection of u': ['-4190221490881490267', '-1589812527242329919'], 'Advection of v': ['579134641775037179...dional velocity': ['5615118027786391092', '2950688743774816779'], 'Thickness%depth_st': ['-2190684862846133371'], ...}} != {'output': {'Advection of u': ['-4190221490881490267', '-1589812527242329919'], 'Advection of v': ['579134641775037179...dional velocity': ['5615118027786391092', '2950688743774816779'], 'Thickness%depth_st': ['-2190684862846133371'], ...}}
Use -v to get more diff
self = <test_bit_reproducibility.TestBitReproducibility object at 0x7f9f59eb5b10>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-esm1.6-configs/pr713/cherry_pick_from_pr707_into_dev-1pctCO2/92fbd79ad80e958e66c7be0fb6489a406d33fcb9')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-esm1.6-configs/pr713/cherry_pick_from_pr707_into_dev-1pctCO2/92fbd79ad80e958e66c7be0fb6489a406d33fcb9/base-experiment')
requested_experiments = {'exp_default_runtime': <model_config_tests.exp_test_helper.ExpTestHelper object at 0x7f9f59eb5720>}
checksum_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-esm1.6-configs/pr713/cherry_pick_from_pr707_into_dev-1pctCO2/92fbd79ad80e958e66c7be0fb6489a406d33fcb9/compared/testing/checksum/historical-24hr-checksum.json')
@pytest.mark.repro
@pytest.mark.repro_historical
@pytest.mark.experiments(
{
EXP_DEFAULT_RUNTIME: {"n_runs": 1},
}
)
def test_repro_historical(
self,
output_path: Path,
control_path: Path,
requested_experiments: dict[str, ExpTestHelper],
checksum_path: Optional[Path],
):
"""
Historical reproducibility test that confirms results from a model
run match a stored previous result. Any generated results are
added to a "checksum" subdirectory in the output directory.
Parameters
----------
output_path: Path
Output directory for test output and where the control and
lab directories are stored for the payu experiments. Default is
set in conftest.py.
control_path: Path
Path to the model configuration to test. This is copied for
control directories in experiments. Default is set in
conftest.py.
requested_experiments: dict[str, ExpTestHelper]
A dictionary of requested experiments, where the key is the
experiment name and the value is an instance of ExpTestHelper.
checksum_path: Optional[Path]
Path to checksums to compare model output against. Default is
set to checksums saved on model configuration. This is a
fixture defined in conftest.py
"""
# Get output directory for the checksums
checksum_output_dir = set_checksum_output_dir(output_path=output_path)
# Use default runtime experiment to get the historical checksums
exp = requested_experiments.get(EXP_DEFAULT_RUNTIME)
# Set the checksum output filename using the model default runtime
runtime_hours = exp.model.default_runtime_seconds // HOUR_IN_SECONDS
checksum_filename = f"historical-{runtime_hours}hr-checksum.json"
# Read the historical checksum file
hist_checksums = read_historical_checksums(
control_path, checksum_filename, checksum_path
)
# Use historical file checksums schema version for parsing checksum,
# otherwise use the model default, if file does not exist
schema_version = (
hist_checksums["schema_version"]
if hist_checksums
else exp.model.default_schema_version
)
# Extract checksums
checksums = exp.extract_checksums(schema_version=schema_version)
# Write out checksums to output file
checksum_output_file = checksum_output_dir / checksum_filename
with open(checksum_output_file, "w") as file:
json.dump(checksums, file, indent=2)
> assert (
hist_checksums == checksums
), f"Checksums were not equal. The new checksums have been written to {checksum_output_file}."
E AssertionError: Checksums were not equal. The new checksums have been written to /scratch/tm70/repro-ci/experiments/access-esm1.6-configs/pr713/cherry_pick_from_pr707_into_dev-1pctCO2/92fbd79ad80e958e66c7be0fb6489a406d33fcb9/checksum/historical-24hr-checksum.json.
E assert {'output': {'...ion': '1-0-0'} == {'output': {'...ion': '1-0-0'}
E
E Omitting 1 identical items, use -vv to show
E Differing items:
E {'output': {'Advection of u': ['-4190221490881490267', '-1589812527242329919'], 'Advection of v': ['579134641775037179...dional velocity': ['5615118027786391092', '2950688743774816779'], 'Thickness%depth_st': ['-2190684862846133371'], ...}} != {'output': {'Advection of u': ['-4190221490881490267', '-1589812527242329919'], 'Advection of v': ['579134641775037179...dional velocity': ['5615118027786391092', '2950688743774816779'], 'Thickness%depth_st': ['-2190684862846133371'], ...}}
E Use -v to get more diff
../test-venv/lib/python3.10/site-packages/model_config_tests/config_tests/test_bit_reproducibility.py:234: AssertionError
Check notice on line 0 in .github
github-actions / Repro Test Results
3 tests found
There are 3 tests, see "Raw output" for the full list of tests.
Raw output
test-venv.lib.python3.10.site-packages.model_config_tests.config_tests.test_bit_reproducibility ‑ test_repro_payu_setup
test-venv.lib.python3.10.site-packages.model_config_tests.config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_repro_determinism
test-venv.lib.python3.10.site-packages.model_config_tests.config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_repro_historical
Loading