From 961b6b689ce172b38850ef8a0a74b079a029a751 Mon Sep 17 00:00:00 2001 From: Michael Zingale Date: Mon, 1 Jan 2024 14:58:43 -0500 Subject: [PATCH 1/5] add a resolution to the nse_test convergence (#2693) allow for 3 different convergence orders --- .../convergence_simplified_sdc_w_vel.sh | 8 +- .../nse_test/create_pretty_tables.py | 132 ++++++++++++------ Exec/reacting_tests/nse_test/inputs.128 | 2 +- Exec/reacting_tests/nse_test/inputs.256 | 2 +- Exec/reacting_tests/nse_test/inputs.32 | 74 ++++++++++ Exec/reacting_tests/nse_test/inputs.512 | 2 +- Exec/reacting_tests/nse_test/inputs.64 | 2 +- 7 files changed, 173 insertions(+), 49 deletions(-) create mode 100644 Exec/reacting_tests/nse_test/inputs.32 diff --git a/Exec/reacting_tests/nse_test/convergence_simplified_sdc_w_vel.sh b/Exec/reacting_tests/nse_test/convergence_simplified_sdc_w_vel.sh index 9d118c0718..0748f46b5b 100755 --- a/Exec/reacting_tests/nse_test/convergence_simplified_sdc_w_vel.sh +++ b/Exec/reacting_tests/nse_test/convergence_simplified_sdc_w_vel.sh @@ -10,14 +10,18 @@ problem.u0=1.e8 problem.v0=1.e8 " +mpiexec -n 8 ${EXEC} inputs.32 ${RUNPARAMS} >& /dev/null mpiexec -n 8 ${EXEC} inputs.64 ${RUNPARAMS} >& /dev/null mpiexec -n 8 ${EXEC} inputs.128 ${RUNPARAMS} >& /dev/null + +RichardsonConvergenceTest2d.gnu.ex coarFile=nse_test_32_plt00080 mediFile=nse_test_64_plt00160 fineFile=nse_test_128_plt00320 >& nse_convergence_simple_sdc_vlo.out + mpiexec -n 8 ${EXEC} inputs.256 ${RUNPARAMS} >& /dev/null -RichardsonConvergenceTest2d.gnu.ex coarFile=nse_test_64_plt00125 mediFile=nse_test_128_plt00250 fineFile=nse_test_256_plt00500 >& nse_convergence_simple_sdc_lo.out +RichardsonConvergenceTest2d.gnu.ex coarFile=nse_test_64_plt00160 mediFile=nse_test_128_plt00320 fineFile=nse_test_256_plt00640 >& nse_convergence_simple_sdc_lo.out mpiexec -n 8 ${EXEC} inputs.512 ${RUNPARAMS} >& /dev/null -RichardsonConvergenceTest2d.gnu.ex coarFile=nse_test_128_plt00250 mediFile=nse_test_256_plt00500 fineFile=nse_test_512_plt01000 >& nse_convergence_simple_sdc_hi.out +RichardsonConvergenceTest2d.gnu.ex coarFile=nse_test_128_plt00320 mediFile=nse_test_256_plt00640 fineFile=nse_test_512_plt01280 >& nse_convergence_simple_sdc_hi.out diff --git a/Exec/reacting_tests/nse_test/create_pretty_tables.py b/Exec/reacting_tests/nse_test/create_pretty_tables.py index ca8a3991f1..8c8d05ca3a 100644 --- a/Exec/reacting_tests/nse_test/create_pretty_tables.py +++ b/Exec/reacting_tests/nse_test/create_pretty_tables.py @@ -14,13 +14,21 @@ def sci_not(num): return r"${:5.3f} \times 10^{{{}}}$".format(round(mant, 3), exp) class Variable(): - def __init__(self, name, lo, o1, med, o2, hi): + def __init__(self, name, lo, o1, med, o2, hi, o3=None, vhi=None): self.name = name self.lo = float(lo) self.o1 = float(o1) self.med = float(med) self.o2 = float(o2) self.hi = float(hi) + if o3 is not None: + self.o3 = float(o3) + else: + self.o3 = None + if vhi is not None: + self.vhi = float(vhi) + else: + self.vhi = None def get_table_line(self, pretty_name=None, simple=False): if pretty_name is not None: @@ -29,69 +37,103 @@ def get_table_line(self, pretty_name=None, simple=False): name = self.name if simple: - _str = r" {:27} {:14.10g} {:5.3f} {:14.10g} {:5.3f} {:14.10g}" - return _str.format(name, self.lo, round(self.o1, 3), self.med, round(self.o2, 3), self.hi) + if self.o3 is None: + return rf" {name:27} {self.lo:14.10g} {round(self.o1, 3):5.3f} {self.med:14.10g} {round(self.o2, 3):5.3f} {self.hi:14.10g}" + else: + return rf" {name:27} {self.lo:14.10g} {round(self.o1, 3):5.3f} 
{self.med:14.10g} {round(self.o2, 3):5.3f} {self.hi:14.10g} {round(self.o3, 3):5.3f} {self.vhi:14.10g}" else: - _str = r" {:27} & {:23} & {:5.3f} & {:23} & {:5.3f} & {:23} \\" - return _str.format(name, sci_not(self.lo), round(self.o1, 3), sci_not(self.med), round(self.o2, 3), sci_not(self.hi)) + if self.o3 is None: + return rf" {name:27} & {sci_not(self.lo):23} & {round(self.o1, 3):5.3f} & {sci_not(self.med):23} & {round(self.o2, 3):5.3f} & {sci_not(self.hi):23} \\" + else: + return rf" {name:27} & {sci_not(self.lo):23} & {round(self.o1, 3):5.3f} & {sci_not(self.med):23} & {round(self.o2, 3):5.3f} & {sci_not(self.hi):23} & {round(self.o3, 3):5.3f} & {sci_not(self.vhi):23} \\" -class ConvergenceData(): +class ConvergenceData2(): def __init__(self): self.data = [] def add_variable(self, name, lo, order1, med, order2, hi): self.data.append(Variable(name, lo, order1, med, order2, hi)) -def read_convergence(file_lo, file_hi): +class ConvergenceData3(): + def __init__(self): + self.data = [] + + def add_variable(self, name, lo, order1, med, order2, hi, order3, vhi): + self.data.append(Variable(name, lo, order1, med, order2, hi, order3, vhi)) + +def read_convergence(file_lo, file_hi, file_vhi): # we'll wait until we find the L1 data lines_lo = [] - found_l1 = False - with open(file_lo, "r") as flo: - for line in flo: - if "L1 norm" in line: - found_l1 = True - continue - if not found_l1: - continue - # value data lines have 4 columns - if len(line.split()) == 4: - lines_lo.append(line.strip()) - lines_hi = [] - found_l1 = False - with open(file_hi, "r") as fhi: - for line in fhi: - if "L1 norm" in line: - found_l1 = True - continue - if not found_l1: + lines_vhi = [] + + fdata = [(lines_lo, file_lo), (lines_hi, file_hi)] + if file_vhi is not None: + fdata.append((lines_vhi, file_vhi)) + + for lines, filec in fdata: + found_l1 = False + with open(filec, "r") as fc: + for line in fc: + if "L1 norm" in line: + found_l1 = True + continue + if not found_l1: + continue + # value data lines have 4 columns + if len(line.split()) == 4: + lines.append(line.strip()) + + if file_vhi is None: + + cd = ConvergenceData2() + + for llo, lhi in zip(lines_lo, lines_hi): + + vlo, elo, o1, emed1 = llo.split() + vhi, emed2, o2, ehi = lhi.split() + + if "---" in o1 or "---" in o2: + print("skipping {}".format(vlo)) continue - # value data lines have 4 columns - if len(line.split()) == 4: - lines_hi.append(line.strip()) - cd = ConvergenceData() + if vlo != vhi: + sys.exit("error: variable mismatch") + + if emed1.strip() != emed2.strip(): + print(emed1, emed2) + sys.exit("error: error mismatch") + + cd.add_variable(vlo, elo, o1, emed1, o2, ehi) - for llo, lhi in zip(lines_lo, lines_hi): + else: - vlo, elo, o1, emed1 = llo.split() - vhi, emed2, o2, ehi = lhi.split() + cd = ConvergenceData3() - if "---" in o1 or "---" in o2: - print("skipping {}".format(vlo)) - continue + for llo, lhi, lvhi in zip(lines_lo, lines_hi, lines_vhi): + + vlo, elo, o1, emed1 = llo.split() + vhi, emed2, o2, ehi1 = lhi.split() + vvhi, ehi2, o3, evhi = lvhi.split() + + if "---" in o1 or "---" in o2 or "---" in o3: + print("skipping {}".format(vlo)) + continue - if vlo != vhi: - sys.exit("error: variable mismatch") + if vlo != vhi or vlo != vvhi: + sys.exit("error: variable mismatch") - if emed1.strip() != emed2.strip(): - print(emed1, emed2) - sys.exit("error: error mismatch") + if emed1.strip() != emed2.strip() or ehi1.strip() != ehi2.strip(): + print(emed1, emed2, ehi1, ehi2) + print(llo) + print(lhi) + print(lvhi) + sys.exit("error: error 
mismatch") - cd.add_variable(vlo, elo, o1, emed1, o2, ehi) + cd.add_variable(vlo, elo, o1, emed1, o2, ehi1, o3, evhi) return cd @@ -103,6 +145,8 @@ def read_convergence(file_lo, file_hi): help="name of the low resolution convergence output file") parser.add_argument("hifile", type=str, nargs=1, help="name of the high resolution convergence output file") + parser.add_argument("veryhifile", type=str, nargs="?", default=None, + help="(optional) name of the very high resolution convergence output file") args = parser.parse_args() @@ -129,8 +173,10 @@ def read_convergence(file_lo, file_hi): # sdc4 file_lo = args.lofile[0] file_hi = args.hifile[0] + file_vhi = args.veryhifile + print(file_vhi) - sdc4 = read_convergence(file_lo, file_hi) + sdc4 = read_convergence(file_lo, file_hi, file_vhi) for v in sdc4.data: if v.name in good_vars.keys(): diff --git a/Exec/reacting_tests/nse_test/inputs.128 b/Exec/reacting_tests/nse_test/inputs.128 index 47789635dc..3ce059a1a9 100644 --- a/Exec/reacting_tests/nse_test/inputs.128 +++ b/Exec/reacting_tests/nse_test/inputs.128 @@ -1,6 +1,6 @@ # ------------------ INPUTS TO MAIN PROGRAM ------------------- max_step = 15000 -stop_time = 0.025 +stop_time = 0.032 # PROBLEM SIZE & GEOMETRY geometry.is_periodic = 1 1 1 diff --git a/Exec/reacting_tests/nse_test/inputs.256 b/Exec/reacting_tests/nse_test/inputs.256 index 8e7234ba22..766365063f 100644 --- a/Exec/reacting_tests/nse_test/inputs.256 +++ b/Exec/reacting_tests/nse_test/inputs.256 @@ -1,6 +1,6 @@ # ------------------ INPUTS TO MAIN PROGRAM ------------------- max_step = 15000 -stop_time = 0.025 +stop_time = 0.032 # PROBLEM SIZE & GEOMETRY geometry.is_periodic = 1 1 1 diff --git a/Exec/reacting_tests/nse_test/inputs.32 b/Exec/reacting_tests/nse_test/inputs.32 new file mode 100644 index 0000000000..6b807f578d --- /dev/null +++ b/Exec/reacting_tests/nse_test/inputs.32 @@ -0,0 +1,74 @@ +# ------------------ INPUTS TO MAIN PROGRAM ------------------- +max_step = 15000 +stop_time = 0.032 + +# PROBLEM SIZE & GEOMETRY +geometry.is_periodic = 1 1 1 +geometry.coord_sys = 0 # 0 => cart, 1 => RZ 2=>spherical +geometry.prob_lo = 0 0 0 +geometry.prob_hi = 2.e7 2.e7 2.e7 +amr.n_cell = 32 32 32 + + +# >>>>>>>>>>>>> BC FLAGS <<<<<<<<<<<<<<<< +# 0 = Interior 3 = Symmetry +# 1 = Inflow 4 = SlipWall +# 2 = Outflow 5 = NoSlipWall +# >>>>>>>>>>>>> BC FLAGS <<<<<<<<<<<<<<<< +castro.lo_bc = 0 0 0 +castro.hi_bc = 0 0 0 + +# WHICH PHYSICS +castro.do_hydro = 1 +castro.do_react = 1 + +castro.ppm_type = 1 +castro.ppm_temp_fix = 0 + +castro.use_flattening = 1 + +castro.riemann_solver = 0 + +castro.small_temp = 1.e7 + +# TIME STEP CONTROL +castro.cfl = 0.8 # cfl number for hyperbolic system +castro.init_shrink = 1.0 # scale back initial timestep +castro.change_max = 1.1 # scale back initial timestep +castro.fixed_dt = 4.e-4 + +# DIAGNOSTICS & VERBOSITY +castro.sum_interval = 1 # timesteps between computing mass +castro.v = 1 # verbosity in Castro.cpp +amr.v = 1 # verbosity in Amr.cpp +#amr.grid_log = grdlog # name of grid logging file + +# REFINEMENT / REGRIDDING +amr.max_level = 0 # maximum level number allowed +amr.ref_ratio = 2 2 2 2 # refinement ratio +amr.regrid_int = 2 2 2 2 # how often to regrid +amr.blocking_factor = 8 # block factor in grid generation +amr.max_grid_size = 64 +amr.n_error_buf = 2 2 2 2 # number of buffer cells in error est + +# CHECKPOINT FILES +amr.checkpoint_files_output = 0 +amr.check_file = nse_test_32_chk # root name of checkpoint file +amr.check_int = 300 # number of timesteps between checkpoints + +# PLOTFILES 
+amr.plot_file = nse_test_32_plt # root name of plotfile +amr.plot_per = 0.24 +amr.derive_plot_vars = ALL + +# problem initialization + +problem.T0 = 5.e9 +problem.dT_fact = 0.2 +problem.rho0 = 1.e9 +problem.L_pert = 2.e7 +problem.nse_tol = 1.e2 + +# microphysics + +network.nse_table_interp_linear = 0 diff --git a/Exec/reacting_tests/nse_test/inputs.512 b/Exec/reacting_tests/nse_test/inputs.512 index 6426052e86..d1437273fe 100644 --- a/Exec/reacting_tests/nse_test/inputs.512 +++ b/Exec/reacting_tests/nse_test/inputs.512 @@ -1,6 +1,6 @@ # ------------------ INPUTS TO MAIN PROGRAM ------------------- max_step = 15000 -stop_time = 0.025 +stop_time = 0.032 # PROBLEM SIZE & GEOMETRY geometry.is_periodic = 1 1 1 diff --git a/Exec/reacting_tests/nse_test/inputs.64 b/Exec/reacting_tests/nse_test/inputs.64 index d066be3e1e..5e2867b41a 100644 --- a/Exec/reacting_tests/nse_test/inputs.64 +++ b/Exec/reacting_tests/nse_test/inputs.64 @@ -1,6 +1,6 @@ # ------------------ INPUTS TO MAIN PROGRAM ------------------- max_step = 15000 -stop_time = 0.025 +stop_time = 0.032 # PROBLEM SIZE & GEOMETRY geometry.is_periodic = 1 1 1 From 6011d8e80d81d86711bef0caad925286a0aac87d Mon Sep 17 00:00:00 2001 From: Michael Zingale Date: Mon, 1 Jan 2024 14:59:29 -0500 Subject: [PATCH 2/5] update actions to the latest versions (#2692) also switch to using setup-python's own pip caching --- .github/workflows/c-linter.yml | 4 ++-- .github/workflows/check-ifdefs.yml | 13 ++--------- .github/workflows/check-makefiles.yml | 13 ++--------- .github/workflows/check-params.yml | 13 ++--------- .github/workflows/codespell.yml | 12 ++-------- .github/workflows/compiler-warnings.yml | 2 +- .github/workflows/create_release.yml | 4 ++-- .github/workflows/detonation-sdc-compare.yml | 2 +- .github/workflows/docs-test.yml | 16 ++++--------- .github/workflows/exact_riemann.yml | 2 +- .github/workflows/flame_wave-compare.yml | 2 +- .github/workflows/gh-pages.yml | 23 +++++-------------- .github/workflows/gpu_action.yml | 2 +- .github/workflows/hip.yml | 2 +- .github/workflows/mhd-compare.yml | 2 +- .github/workflows/rad-compare.yml | 2 +- .../reacting-convergence-true-sdc.yml | 2 +- .github/workflows/sedov-compare.yml | 2 +- .github/workflows/uniform_cube.yml | 2 +- .github/workflows/uniform_sphere.yml | 2 +- .../workflows/wdmerger_collision-compare.yml | 2 +- 21 files changed, 35 insertions(+), 89 deletions(-) diff --git a/.github/workflows/c-linter.yml b/.github/workflows/c-linter.yml index 295ac4ede4..435a7fa60a 100644 --- a/.github/workflows/c-linter.yml +++ b/.github/workflows/c-linter.yml @@ -6,7 +6,7 @@ jobs: cpp-linter: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -56,7 +56,7 @@ jobs: -run-linter - name: Archive clang tidy report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: clang-tidy-report path: clang-tidy-report.txt diff --git a/.github/workflows/check-ifdefs.yml b/.github/workflows/check-ifdefs.yml index 5558864238..3f4ab8daae 100644 --- a/.github/workflows/check-ifdefs.yml +++ b/.github/workflows/check-ifdefs.yml @@ -19,18 +19,9 @@ jobs: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.10' - - - name: Cache pip - uses: actions/cache@v3 - with: - # this path is specific to Ubuntu - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- + python-version: '3.11' - name: Run 
check-ifdefs run: | diff --git a/.github/workflows/check-makefiles.yml b/.github/workflows/check-makefiles.yml index 396393f383..33a21d604b 100644 --- a/.github/workflows/check-makefiles.yml +++ b/.github/workflows/check-makefiles.yml @@ -19,18 +19,9 @@ jobs: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.10' - - - name: Cache pip - uses: actions/cache@v3 - with: - # this path is specific to Ubuntu - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- + python-version: '3.11' - name: Run check-ifdefs run: | diff --git a/.github/workflows/check-params.yml b/.github/workflows/check-params.yml index 324a0eb05f..992e6b9be8 100644 --- a/.github/workflows/check-params.yml +++ b/.github/workflows/check-params.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -28,19 +28,10 @@ jobs: cd ../.. - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' - - name: Cache pip - uses: actions/cache@v3 - with: - # this path is specific to Ubuntu - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Run check-params run: | PYTHONPATH=external/Microphysics/util/build_scripts python .github/workflows/check_params.py . diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index f5191f1bd7..6964398e22 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -22,15 +22,7 @@ jobs: uses: actions/setup-python@v4 with: python-version: '3.10' - - - name: Cache pip - uses: actions/cache@v3 - with: - # this path is specific to Ubuntu - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- + cache: "pip" - name: Install dependencies run: pip install -r ./requirements.txt diff --git a/.github/workflows/compiler-warnings.yml b/.github/workflows/compiler-warnings.yml index 1f95139323..0f6bd12fb1 100644 --- a/.github/workflows/compiler-warnings.yml +++ b/.github/workflows/compiler-warnings.yml @@ -5,7 +5,7 @@ jobs: compiler_warnings: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml index 3d021ab174..bd9d8795a8 100644 --- a/.github/workflows/create_release.yml +++ b/.github/workflows/create_release.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Get the version id: get_version @@ -33,4 +33,4 @@ jobs: release_name: Release ${{ github.ref }} body: ${{ env.RELEASE_TXT }} draft: false - prerelease: false \ No newline at end of file + prerelease: false diff --git a/.github/workflows/detonation-sdc-compare.yml b/.github/workflows/detonation-sdc-compare.yml index b50dd3f91c..be850b35cf 100644 --- a/.github/workflows/detonation-sdc-compare.yml +++ b/.github/workflows/detonation-sdc-compare.yml @@ -5,7 +5,7 @@ jobs: detonation-sdc: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff 
--git a/.github/workflows/docs-test.yml b/.github/workflows/docs-test.yml index 054ca5a8f5..be80c197fc 100644 --- a/.github/workflows/docs-test.yml +++ b/.github/workflows/docs-test.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install pandoc and doxygen @@ -22,18 +22,10 @@ jobs: sudo apt install pandoc doxygen - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.10' - - - name: Cache pip - uses: actions/cache@v3 - with: - # this path is specific to Ubuntu - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- + python-version: '3.11' + cache: "pip" - name: Install dependencies run: pip install -r ./requirements.txt diff --git a/.github/workflows/exact_riemann.yml b/.github/workflows/exact_riemann.yml index d369a87e78..996684ba78 100644 --- a/.github/workflows/exact_riemann.yml +++ b/.github/workflows/exact_riemann.yml @@ -5,7 +5,7 @@ jobs: exact_riemann: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/flame_wave-compare.yml b/.github/workflows/flame_wave-compare.yml index efb4abccc0..46ad5b4285 100644 --- a/.github/workflows/flame_wave-compare.yml +++ b/.github/workflows/flame_wave-compare.yml @@ -5,7 +5,7 @@ jobs: flame_wave-2d: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index d687274828..d5b3995fa3 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -10,7 +10,7 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install pandoc and doxygen run: | @@ -18,27 +18,16 @@ jobs: sudo apt install pandoc doxygen - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.11' + cache: "pip" - name: Upgrade pip run: | # install pip=>20.1 to use "pip cache dir" python3 -m pip install --upgrade pip - - name: Get pip cache dir - id: pip-cache - run: echo "::set-output name=dir::$(pip cache dir)" - - - name: Cache dependencies - uses: actions/cache@v3 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Install dependencies run: python3 -m pip install -r ./requirements.txt @@ -48,7 +37,7 @@ jobs: GITHUB_BRANCH: 'main' run: ./deploy_docs_action.sh - - name: Build docs + - name: Build docs if: ${{ endsWith(github.ref, 'development') }} env: GITHUB_BRANCH: 'development' @@ -59,4 +48,4 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./out - keep_files: true \ No newline at end of file + keep_files: true diff --git a/.github/workflows/gpu_action.yml b/.github/workflows/gpu_action.yml index 10843ef256..6861161cbb 100644 --- a/.github/workflows/gpu_action.yml +++ b/.github/workflows/gpu_action.yml @@ -5,7 +5,7 @@ jobs: gpu-compilation: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/hip.yml b/.github/workflows/hip.yml index d00b3c9954..f74b1e7c7e 100644 --- a/.github/workflows/hip.yml +++ b/.github/workflows/hip.yml @@ -10,7 +10,7 @@ jobs: 
hip-compile: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/mhd-compare.yml b/.github/workflows/mhd-compare.yml index 8df15f2574..403ff79f5e 100644 --- a/.github/workflows/mhd-compare.yml +++ b/.github/workflows/mhd-compare.yml @@ -5,7 +5,7 @@ jobs: OrszagTang-3d: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/rad-compare.yml b/.github/workflows/rad-compare.yml index b123c972c8..775024000d 100644 --- a/.github/workflows/rad-compare.yml +++ b/.github/workflows/rad-compare.yml @@ -5,7 +5,7 @@ jobs: Rad2Tshock-1d: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/reacting-convergence-true-sdc.yml b/.github/workflows/reacting-convergence-true-sdc.yml index cc8dd1d471..bd810420f8 100644 --- a/.github/workflows/reacting-convergence-true-sdc.yml +++ b/.github/workflows/reacting-convergence-true-sdc.yml @@ -5,7 +5,7 @@ jobs: reacting-convergence-true-sdc: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/sedov-compare.yml b/.github/workflows/sedov-compare.yml index 06cfca49f4..dd484b8525 100644 --- a/.github/workflows/sedov-compare.yml +++ b/.github/workflows/sedov-compare.yml @@ -5,7 +5,7 @@ jobs: Sedov-3d: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/uniform_cube.yml b/.github/workflows/uniform_cube.yml index 33d7e8b5df..edff5a8494 100644 --- a/.github/workflows/uniform_cube.yml +++ b/.github/workflows/uniform_cube.yml @@ -5,7 +5,7 @@ jobs: uniform_cube: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/uniform_sphere.yml b/.github/workflows/uniform_sphere.yml index 1dbd01f90b..e9ad3b9e4e 100644 --- a/.github/workflows/uniform_sphere.yml +++ b/.github/workflows/uniform_sphere.yml @@ -5,7 +5,7 @@ jobs: uniform_sphere: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/wdmerger_collision-compare.yml b/.github/workflows/wdmerger_collision-compare.yml index cd1e9c8b5f..99551f09dd 100644 --- a/.github/workflows/wdmerger_collision-compare.yml +++ b/.github/workflows/wdmerger_collision-compare.yml @@ -5,7 +5,7 @@ jobs: wdmerger_collision-2d: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 From b1e97c6ed804d96687ee7b744ddd8f955324e6bf Mon Sep 17 00:00:00 2001 From: Michael Zingale Date: Tue, 2 Jan 2024 11:02:27 -0500 Subject: [PATCH 3/5] update to 24.01 --- external/Microphysics | 2 +- external/amrex | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/external/Microphysics b/external/Microphysics index 49c97127a4..9d0655b75c 160000 --- a/external/Microphysics +++ b/external/Microphysics @@ -1 +1 @@ -Subproject commit 49c97127a43700ac5d7f9ac5c3aa296a0c9695a7 +Subproject commit 9d0655b75c2d7c0690fe637f8491fa572227a1dc diff --git a/external/amrex b/external/amrex index 2f47fa7361..a068330e6c 160000 --- a/external/amrex +++ b/external/amrex @@ -1 +1 @@ -Subproject commit 2f47fa7361bbf5793503cfb31b717bece889bde0 +Subproject commit a068330e6c66b5d9a7c6ca0e1c874f318e73f4cc From 
a7a2f79408993f2ed49ee741881ffca2831c2808 Mon Sep 17 00:00:00 2001 From: Michael Zingale Date: Tue, 2 Jan 2024 11:05:43 -0500 Subject: [PATCH 4/5] update changes --- CHANGES.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 751300868c..0bb87dc598 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,13 @@ +# 24.01 + + * An option for unlimited PPM reconstruction was added (#2670) + + * An option allowing for optional passive sources to the conserved + state was added (#2678) + + * A script `diag_parser.py` was added to allow for easy parsing of + the global diagnostic files output at runtime (#2666, #2667) + # 23.12 * The radiation solver port to C++ has been completed (#2638, #2648) From f293d91f46fa38582197ddb23cbd782c32d11a3e Mon Sep 17 00:00:00 2001 From: Michael Zingale Date: Tue, 2 Jan 2024 11:07:51 -0500 Subject: [PATCH 5/5] start of moving runtime parameters to structs (#2688) This addresses the first part of #2685 Depends on AMReX-Astro/Microphysics#1422 --- Source/driver/Castro.H | 9 +++- Source/driver/Castro.cpp | 2 + Source/driver/parse_castro_params.py | 61 ++++++++++++++++++++++++---- Util/scripts/write_probdata.py | 2 +- 4 files changed, 64 insertions(+), 10 deletions(-) diff --git a/Source/driver/Castro.H b/Source/driver/Castro.H index 755a540984..9cc45996c3 100644 --- a/Source/driver/Castro.H +++ b/Source/driver/Castro.H @@ -48,6 +48,8 @@ constexpr int PSTAR_BISECT_FACTOR = 5; #include #include +#include + using std::istream; using std::ostream; @@ -535,7 +537,6 @@ public: #include #endif - /// /// Estimate time step. /// @@ -1198,6 +1199,12 @@ public: static Vector > data_logs; static Vector > problem_data_logs; +/// +/// runtime parameters +// + static params_t params; + + protected: diff --git a/Source/driver/Castro.cpp b/Source/driver/Castro.cpp index 9d0855d1d5..f10c59b0e7 100644 --- a/Source/driver/Castro.cpp +++ b/Source/driver/Castro.cpp @@ -76,6 +76,8 @@ int Castro::NUM_GROW_SRC = -1; int Castro::lastDtPlotLimited = 0; Real Castro::lastDtBeforePlotLimiting = 0.0; +params_t Castro::params; + Real Castro::num_zones_advanced = 0.0; Vector Castro::source_names; diff --git a/Source/driver/parse_castro_params.py b/Source/driver/parse_castro_params.py index cfa2ebb30c..aedeea6624 100755 --- a/Source/driver/parse_castro_params.py +++ b/Source/driver/parse_castro_params.py @@ -7,7 +7,7 @@ parameters have the format: - name type default need-in-fortran? 
ifdef + name type default ifdef the first three (name, type, default) are mandatory: @@ -22,8 +22,6 @@ the next are optional: - need-in-fortran: no longer used - ifdef: only define this parameter if the name provided is #ifdef-ed Any line beginning with a "#" is ignored @@ -131,7 +129,7 @@ def read_param_file(infile): return params -def write_headers(params, out_directory): +def write_headers(params, out_directory, struct_name): # output @@ -196,7 +194,7 @@ def write_headers(params, out_directory): cp.write("#endif\n") cp.close() - # write castro_queries.H + # write name_queries.H try: cq = open(f"{out_directory}/{nm}_queries.H", "w", encoding="UTF-8") except OSError: @@ -208,13 +206,15 @@ def write_headers(params, out_directory): if ifdef is None: for p in [q for q in params_nm if q.ifdef is None]: cq.write(p.get_default_string()) - cq.write(p.get_query_string("C++")) + cq.write(p.get_query_string()) + cq.write(p.get_query_struct_string(struct_name=struct_name, class_name="Castro")) cq.write("\n") else: cq.write(f"#ifdef {ifdef}\n") for p in [q for q in params_nm if q.ifdef == ifdef]: cq.write(p.get_default_string()) - cq.write(p.get_query_string("C++")) + cq.write(p.get_query_string()) + cq.write(p.get_query_struct_string(struct_name=struct_name, class_name="Castro")) cq.write("\n") cq.write("#endif\n") cq.write("\n") @@ -238,19 +238,64 @@ def write_headers(params, out_directory): jo.close() + # now write a single file that contains all of the parameter structs + try: + sf = open(f"{out_directory}/{struct_name}_type.H", "w", encoding="UTF-8") + except OSError: + sys.exit(f"unable to open {struct_name}_type.H for writing") + + sf.write(CWARNING) + sf.write(f"#ifndef {struct_name.upper()}_TYPE_H\n") + sf.write(f"#define {struct_name.upper()}_TYPE_H\n\n") + + sf.write("#include \n\n") + + for nm in namespaces: + + params_nm = [q for q in params if q.namespace == nm] + # sort by repr since None may be present + ifdefs = sorted({q.ifdef for q in params_nm}, key=repr) + + sf.write(f"struct {nm}_t {{\n") + print("namespace = ", nm) + for ifdef in ifdefs: + if ifdef is None: + for p in [q for q in params_nm if q.ifdef is None]: + sf.write(p.get_struct_entry()) + else: + sf.write(f"#ifdef {ifdef}\n") + for p in [q for q in params_nm if q.ifdef == ifdef]: + sf.write(p.get_struct_entry()) + sf.write("#endif\n") + + + sf.write("};\n\n") + + # now the parent struct + + sf.write(f"struct {struct_name}_t {{\n") + for nm in namespaces: + sf.write(f" {nm}_t {nm};\n") + sf.write("};\n\n") + + sf.write("#endif\n") + sf.close() + def main(): """the main driver""" parser = argparse.ArgumentParser() parser.add_argument("-o", type=str, default=None, help="output directory for the generated files") + parser.add_argument("-s", type=str, default="params", + help="name for the name struct that will hold the parameters") parser.add_argument("input_file", type=str, nargs=1, help="input file containing the list of parameters we will define") args = parser.parse_args() p = read_param_file(args.input_file[0]) - write_headers(p, args.o) + write_headers(p, args.o, args.s) if __name__ == "__main__": main() diff --git a/Util/scripts/write_probdata.py b/Util/scripts/write_probdata.py index 2e44621cdc..68cb716aac 100755 --- a/Util/scripts/write_probdata.py +++ b/Util/scripts/write_probdata.py @@ -244,7 +244,7 @@ def write_probin(prob_param_files, cxx_prefix): fout.write(f" {p.get_default_string()}") if p.in_namelist: - fout.write(f" {p.get_query_string('C++')}") + fout.write(f" {p.get_query_string()}") fout.write("\n") 
fout.write(" }\n")