
CI (pull_request) for 132/merge #36

GitHub Actions / Repro Test Results failed Jul 10, 2024 in 0s

2 fail in 1h 2m 24s

2 tests (1 suite, 1 file): 0 ✅ passed, 0 💤 skipped, 2 ❌ failed, in 1h 2m 24s ⏱️

Results for commit 7b058b3.

Annotations

test_bit_repro_historical (test-venv.lib.python3.11.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed

/opt/testing/checksum/test_report.xml [took 11m 50s]
Raw output
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7f61cdbdbf90>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc/base-experiment')
checksum_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc/base-experiment/testing/checksum/historical-3hr-checksum.json')

    @pytest.mark.checksum
    def test_bit_repro_historical(
        self, output_path: Path, control_path: Path, checksum_path: Path
    ):
        """
        Test that a run reproduces historical checksums
        """
        # Setup checksum output directory
        # NOTE: The checksum output file is used as part of `repro-ci` workflows
        output_dir = output_path / "checksum"
        output_dir.mkdir(parents=True, exist_ok=True)
        checksum_output_file = output_dir / "historical-3hr-checksum.json"
        if checksum_output_file.exists():
            checksum_output_file.unlink()
    
        # Setup and run experiment
        exp = setup_exp(control_path, output_path, "test_bit_repro_historical")
        exp.model.set_model_runtime()
        exp.setup_and_run()
    
        assert exp.model.output_exists()
    
        # Check checksum against historical checksum file
        hist_checksums = None
        hist_checksums_schema_version = None
    
        if (
            not checksum_path.exists()
        ):  # AKA, if the config branch doesn't have a checksum, or the path is misconfigured
            hist_checksums_schema_version = exp.model.default_schema_version
        else:  # we can use the historical-3hr-checksum that is in the testing directory
            with open(checksum_path) as file:
                hist_checksums = json.load(file)
    
                # Parse checksums using the same version
                hist_checksums_schema_version = hist_checksums["schema_version"]
    
        checksums = exp.extract_checksums(schema_version=hist_checksums_schema_version)
    
        # Write out checksums to output file
        with open(checksum_output_file, "w") as file:
            json.dump(checksums, file, indent=2)
    
>       assert (
            hist_checksums == checksums
        ), f"Checksums were not equal. The new checksums have been written to {checksum_output_file}."
E       AssertionError: Checksums were not equal. The new checksums have been written to /scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc/checksum/historical-3hr-checksum.json.
E       assert {'output': {'...ion': '1-0-0'} == {'output': {'...ion': '1-0-0'}
E         
E         Omitting 1 identical items, use -vv to show
E         Differing items:
E         {'output': {'Advection of u': ['0', '-6030637310235604706'], 'Advection of v': ['0', '-185742899745698524'], 'Meridional velocity': ['-7041711812585786936', '-2015881717187427893'], 'Thickness%depth_st': ['-1759098274750011954'], ...}} != {'output': {'Advection of u': ['0', '-6851658256333481521'], 'Advection of v': ['0', '-2231802981572564633'], 'Meridional velocity': ['-7041711812585786936', '-7787270711746013894'], 'Thickness%depth_st': ['-1759098274750011954'], ...}}
E         Use -v to get more diff

../test-venv/lib/python3.11/site-packages/model_config_tests/test_bit_reproducibility.py:59: AssertionError
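
To triage a failure like this locally, a minimal sketch (assuming only what the log shows: checksum files with a top-level "output" mapping of field names to lists of checksum strings; the paths below are the ones from this run) that prints exactly which fields changed, rather than pytest's truncated dict diff:

import json
from pathlib import Path

# Paths taken from the failure output above.
base = Path("/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc")
hist_file = base / "base-experiment/testing/checksum/historical-3hr-checksum.json"
new_file = base / "checksum/historical-3hr-checksum.json"

hist = json.loads(hist_file.read_text())
new = json.loads(new_file.read_text())

# Compare field by field so each differing variable is reported explicitly.
for field in sorted(hist["output"].keys() | new["output"].keys()):
    old_vals = hist["output"].get(field)
    new_vals = new["output"].get(field)
    if old_vals != new_vals:
        print(f"{field}:\n  historical: {old_vals}\n  new:        {new_vals}")

Run against this failure, it would list fields such as "Advection of u" and "Meridional velocity", whose checksum lists differ above, while matching fields such as "Thickness%depth_st" are skipped.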

test_restart_repro (test-venv.lib.python3.11.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed

/opt/testing/checksum/test_report.xml [took 50m 32s]
Raw output
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7f61cdbf0c50>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-01deg_jra55_iaf_bgc/base-experiment')

    @pytest.mark.checksum
    def test_restart_repro(self, output_path: Path, control_path: Path):
        """
        Test that a run reproduces across restarts.
        """
        # First do two short (1 day) runs.
        exp_2x1day = setup_exp(control_path, output_path, "test_restart_repro_2x1day")
    
        # Reconfigure to a 1 day run.
        exp_2x1day.model.set_model_runtime(seconds=86400)
    
        # Now run twice.
        exp_2x1day.setup_and_run()
        exp_2x1day.force_qsub_run()
    
        # Now do a single 2 day run
        exp_2day = setup_exp(control_path, output_path, "test_restart_repro_2day")
        # Reconfigure
        exp_2day.model.set_model_runtime(seconds=172800)
    
        # Run once.
        exp_2day.setup_and_run()
    
        # Now compare the output between our two short and one long run.
        checksums_1d_0 = exp_2x1day.extract_checksums()
        checksums_1d_1 = exp_2x1day.extract_checksums(exp_2x1day.output001)
    
        checksums_2d = exp_2day.extract_checksums()
    
        # Use model-specific comparison method for checksums
        model = exp_2day.model
        matching_checksums = model.check_checksums_over_restarts(
            long_run_checksum=checksums_2d,
            short_run_checksum_0=checksums_1d_0,
            short_run_checksum_1=checksums_1d_1,
        )
    
        if not matching_checksums:
            # Write checksums out to file
            with open(output_path / "restart-1d-0-checksum.json", "w") as file:
                json.dump(checksums_1d_0, file, indent=2)
            with open(output_path / "restart-1d-1-checksum.json", "w") as file:
                json.dump(checksums_1d_1, file, indent=2)
            with open(output_path / "restart-2d-0-checksum.json", "w") as file:
                json.dump(checksums_2d, file, indent=2)
    
>       assert matching_checksums
E       assert False

../test-venv/lib/python3.11/site-packages/model_config_tests/test_bit_reproducibility.py:131: AssertionError
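
The model-specific comparison method itself is not shown in the log. As a rough illustration only (a hypothetical stand-in, not the library's actual implementation), a check of this shape passes when every checksum from either 1-day leg also appears in the 2-day run's output:

def naive_check_checksums_over_restarts(
    long_run_checksum, short_run_checksum_0, short_run_checksum_1
):
    # Hypothetical stand-in for model.check_checksums_over_restarts().
    # Assumes the {"output": {field: [checksums]}} layout seen in the
    # checksum files above.
    long_output = long_run_checksum["output"]
    for short_run in (short_run_checksum_0, short_run_checksum_1):
        for field, checksums in short_run["output"].items():
            for value in checksums:
                if value not in long_output.get(field, []):
                    return False
    return True

The argument names mirror those passed at the call site above; a real implementation may compare positionally within each field's checksum list rather than by membership, which is why the test delegates to the model object.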