diff --git a/.gitmodules b/.gitmodules index b43d0a53..a581b0c8 100644 --- a/.gitmodules +++ b/.gitmodules @@ -5,8 +5,8 @@ [submodule "DA_update"] path = DA_update url = https://github.com/ufs-community/land-DA.git - branch = feature/porting + branch = develop [submodule "vector2tile"] path = vector2tile url = https://github.com/NOAA-PSL/land-vector2tile.git - branch = develop + branch = develop diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..95b6d849 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,31 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-20.04 + tools: + python: "3.9" + # You can also specify other tool versions: + # nodejs: "16" + # rust: "1.55" + # golang: "1.17" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/source/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: docs/requirements.txt + + diff --git a/DA_update b/DA_update index a81c8fd5..5c8c47ac 160000 --- a/DA_update +++ b/DA_update @@ -1 +1 @@ -Subproject commit a81c8fd5ec4a6c571f76ce3426f2b0777a6d2349 +Subproject commit 5c8c47acf683ccbd6a3769ff39e6b88efd387198 diff --git a/README.md b/README.md index d55de12e..98c6927b 100644 --- a/README.md +++ b/README.md @@ -6,11 +6,11 @@ The UFS includes [multiple applications](https://ufscommunity.org/science/abouta The offline Noah-MP LSM is a standalone, uncoupled model used to execute land surface simulations. In this traditional uncoupled mode, near-surface atmospheric forcing data is required as input forcing. 
This LSM simulates soil moisture (both liquid and frozen), soil temperature, skin temperature, snow depth, snow water equivalent (SWE), snow density, canopy water content, and the energy flux and water flux terms of the surface energy balance and surface water balance. Its data assimilation framework applies the Local Ensemble Transform Kalman Filter-Optimal Interpolation (LETKF-OI) algorithm to combine the state-dependent background error derived from an ensemble forecast with the observations and their corresponding uncertainties to produce an analysis ensemble (Hunt et al., 2007). -The Noah-MP LSM has evolved through community efforts to pursue and refine a modern-era LSM suitable for use in the National Centers for Environmental Prediction (NCEP) operational weather and climate prediction models. This collaborative effort continues with participation from entities such as NCAR, NCEP, NASA, and university groups. The development branch of the Land DA System is continually evolving as the system undergoes open development. The latest Land DA release (v1.1.0) represents a snapshot of this continuously evolving system. +The Noah-MP LSM has evolved through community efforts to pursue and refine a modern-era LSM suitable for use in the National Centers for Environmental Prediction (NCEP) operational weather and climate prediction models. This collaborative effort continues with participation from entities such as NCAR, NCEP, NASA, and university groups. The development branch of the Land DA System is continually evolving as the system undergoes open development. The latest Land DA release (v1.2.0) represents a snapshot of this continuously evolving system. -The Land DA System User's Guide associated with the development branch is at: https://land-da.readthedocs.io/en/develop/, while the guide specific to the Land DA v1.1.0 release can be found at: https://land-da-workflow.readthedocs.io/en/release-public-v1.1.0/. 
Users may download data for use with the most recent release from the [Land DA data bucket](https://noaa-ufs-land-da-pds.s3.amazonaws.com/index.html#current_land_da_release_data/). The [Land DA Docker Hub](https://hub.docker.com/r/noaaepic/ubuntu20.04-intel-landda) hosts Land DA containers. These containers package the Land DA System together with all its software dependencies for an easier experience building and running Land DA. +The Land DA System User's Guide associated with the development branch is at: https://land-da.readthedocs.io/en/develop/, while the guide specific to the Land DA v1.2.0 release can be found at: https://land-da-workflow.readthedocs.io/en/release-public-v1.2.0/. Users may download data for use with the most recent release from the [Land DA data bucket](https://noaa-ufs-land-da-pds.s3.amazonaws.com/index.html#current_land_da_release_data/v1.2.0). The [Land DA Docker Hub](https://hub.docker.com/r/noaaepic/ubuntu20.04-intel-landda) hosts Land DA containers. These containers package the Land DA System together with all its software dependencies for an easier experience building and running Land DA. For any publications based on work with the UFS Offline Land Data Assimilation System, please include a citation to the DOI below: -UFS Development Team. (2023, March 6). Unified Forecast System (UFS) Land Data Assimilation (DA) System (Version v1.1.0). Zenodo. https://doi.org/10.5281/zenodo.7675721 +UFS Development Team. (2023, Dec. 11). Unified Forecast System (UFS) Land Data Assimilation (DA) System (Version v1.2.0). Zenodo. https://doi.org/10.5281/zenodo.7675721 diff --git a/docs/source/BackgroundInfo/Introduction.rst b/docs/source/BackgroundInfo/Introduction.rst new file mode 100644 index 00000000..e71d4361 --- /dev/null +++ b/docs/source/BackgroundInfo/Introduction.rst @@ -0,0 +1,156 @@ +.. 
_Intro: + +**************** +Introduction +**************** + +This User's Guide provides guidance for running the Unified Forecast System +(:term:`UFS`) offline Land Data Assimilation (DA) System. Land DA is an offline version of the Noah Multi-Physics (Noah-MP) land surface model (LSM) used in the `UFS Weather Model `__ (WM). Its data assimilation framework uses +the Joint Effort for Data assimilation Integration (:term:`JEDI`) software. The offline UFS Land DA System currently only works with snow data. +Thus, this User's Guide focuses primarily on the snow DA process. + +Since the last release, developers have added a variety of features: + +* Integration of the UFS Noah-MP land component into the Land DA System for use as an alternative to the Common Community Physics Package (:term:`CCPP`) Noah-MP LSM land driver +* Model forcing options for use with the UFS land component: + + * Provided a new analysis option in the cubed-sphere native grid using :term:`GSWP3` forcing + * Established global land grid-point consistency with the head of the UFS WM baseline test cases (New global land grid point is changed from 18360 to 18322.) + * Added a new sample configuration file (``settings_DA_cycle_gswp3``) + * Included a new ECMWF :term:`ERA5` reanalysis forcing option in the existing vector-to-tile conversion analysis process + +* CTest suite upgrades --- the ERA5 CTests now test the operability of seven major components of Land DA: vector2tile, create_ens, letkfoi_snowda, apply_jediincr, tile2vector, land_driver, and UFS datm_land. 
+* Upgrade of JEDI :term:`DA ` framework to use JEDI Skylab v4.0 (`PR #28 `__) +* Updates to sample datasets for the release (see the `Land DA data bucket `__) +* Singularity/Apptainer container (``ubuntu20.04-intel-landda-release-public-v1.2.0``) updates to support the changes described above +* Documentation updates to reflect the changes above + +The Land DA System citation is as follows and should be used when presenting results based on research conducted with the Land DA System: + +UFS Development Team. (2023, December 11). Unified Forecast System (UFS) Land Data Assimilation (DA) System (Version v1.2.0). Zenodo. https://doi.org/10.5281/zenodo.7675721 + + +Organization +************** + +This User's Guide is organized into four sections: (1) *Background Information*; (2) *Building, Running, and Testing the Land DA System*; (3) *Customizing the Workflow*; and (4) *Reference*. + +Background Information +======================== + * This chapter (Introduction) provides background information on the Unified Forecast System (:term:`UFS`) and the NoahMP model. + * :numref:`Chapter %s ` (Technical Overview) outlines prerequisites, user support levels, and directory structure. + +Building, Running, and Testing the Land DA System +=================================================== + + * :numref:`Chapter %s: Land DA Workflow ` explains how to build and run the Land DA System on :ref:`Level 1 ` systems (currently Hera and Orion). + * :numref:`Chapter %s: Containerized Land DA Workflow ` explains how to build and run the containerized Land DA System on non-Level 1 systems. + * :numref:`Chapter %s: Testing the Land DA Workflow ` explains how to run the Land DA CTests. + +Customizing the Workflow +========================= + + * :numref:`Chapter %s: Model ` provides information on input data and configuration parameters in the Noah-MP LSM and its Vector-to-Tile Converter. 
+ * :numref:`Chapter %s: DA Framework ` provides information on the DA system, required data, and configuration parameters. + +Reference +=========== + + * :numref:`Chapter %s: Glossary ` lists important terms. + +User Support and Documentation +******************************** + +Questions +========== + +The Land DA System's `GitHub Discussions `__ forum provides online support for UFS users and developers to post questions and exchange information. When users encounter difficulties running the Land DA System, this is the place to post. Users can expect an initial response within two business days. + +When posting a question, it is recommended that users provide the following information: + +* The platform or system being used (e.g., Hera, Orion, container, MacOS, Linux) +* The version of the Land DA System being used (e.g., ``develop``, ``release/public-v1.1.0``). (To determine this, users can run ``git branch``, and the name of the branch with an asterisk ``*`` in front of it is the name of the branch or tag they are working with.) Note that the Land DA version being used and the version of the documentation being used should match, or users will run into difficulties. 
+* Stage of the application when the issue appeared (i.e., build/compilation, configuration, or forecast run) +* Contents of relevant configuration files +* Full error message (preferably in text form rather than a screenshot) +* Current shell (e.g., bash, csh) and modules loaded +* Compiler + MPI combination being used +* Run directory and code directory, if available on supported platforms + +Bug Reports +============ + +If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in `GitHub Discussions `__, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a `GitHub Discussion `__ post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search `open issues `__ to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a `GitHub Issue `__. + +Feature Requests and Enhancements +================================== + +Users who want to request a feature enhancement or the addition of a new feature have two options: + + #. File a `GitHub Issue `__ and add (or request that a code manager add) the ``EPIC Support Requested`` label. + #. Post a request for a feature or enhancement in the `Enhancements `__ category of GitHub Discussions. These feature requests will be forwarded to the Earth Prediction Innovation Center (`EPIC `__) management team for prioritization and eventual addition to the Land DA System. + + +.. 
_Background: + +Background Information +************************ + +Unified Forecast System (UFS) +=============================== + +The UFS is a community-based, coupled, comprehensive Earth modeling system. It includes `multiple applications `__ that support different forecast durations and spatial domains. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from many different modeling systems. +The UFS is designed to enable research, development, and contribution +opportunities within the broader :term:`Weather Enterprise` (including +government, industry, and academia). For more information about the UFS, visit the `UFS Portal `__. + + +.. _NoahMP: + +Noah-MP +========== + +The offline Noah-MP LSM is a stand-alone, uncoupled model used to execute land surface simulations. In this traditional uncoupled mode, near-surface atmospheric :term:`forcing data` are required as input forcing. This LSM simulates soil moisture (both liquid and frozen), soil temperature, skin temperature, snow depth, snow water equivalent (SWE), snow density, canopy water content, and the energy flux and water flux terms of the surface energy balance and surface water balance. + +Noah-MP uses a big-leaf approach with a separated vegetation canopy accounting +for vegetation effects on surface energy and water balances, a modified two-stream +approximation scheme to include the effects of vegetation canopy gaps that vary +with solar zenith angle and the canopy 3-D structure on radiation transfer, +a 3-layer physically-based snow model, a more permeable frozen soil by separating +a grid cell into a permeable fraction and impermeable fraction, a simple +groundwater model with a TOPMODEL-based runoff scheme, and a short-term leaf +phenology model. 
Noah-MP LSM enables a modular framework for diagnosing differences +in process representation, facilitating ensemble forecasts and uncertainty +quantification, and choosing process representations appropriate for the application. +Noah-MP developers designed multiple parameterization options for leaf dynamics, +radiation transfer, stomatal resistance, soil moisture stress factor for stomatal +resistance, aerodynamic resistance, runoff, snowfall, snow surface albedo, +supercooled liquid water in frozen soil, and frozen soil permeability. + +The Noah-MP LSM has evolved through community efforts to pursue and refine a modern-era LSM suitable for use in the National Centers for Environmental Prediction (NCEP) operational weather and climate prediction models. This collaborative effort continues with participation from entities such as NCAR, NCEP, NASA, and university groups. + +Noah-MP has been implemented in the UFS via the :term:`CCPP` physics package and +is currently being tested for operational use in GFSv17 and RRFS v2. Additionally, the UFS Weather Model now contains a Noah-MP land component. Noah-MP has +also been used operationally in the NOAA National Water Model (NWM) since 2016. Details about the model's physical parameterizations can be found in :cite:t:`NiuEtAl2011` (2011). + +Disclaimer +************* + +The United States Department of Commerce (DOC) GitHub project code is +provided on an “as is” basis and the user assumes responsibility for its +use. DOC has relinquished control of the information and no longer has a +responsibility to protect the integrity, confidentiality, or +availability of the information. Any claims against the Department of +Commerce stemming from the use of its GitHub project will be governed by +all applicable Federal laws. 
Any reference to specific commercial +products, processes, or services by service mark, trademark, +manufacturer, or otherwise, does not constitute or imply their +endorsement, recommendation, or favoring by the Department of Commerce. +The Department of Commerce seal and logo, or the seal and logo of a DOC +bureau, shall not be used in any manner to imply endorsement of any +commercial product or activity by DOC or the United States Government. + +References +************* + +.. bibliography:: ../references.bib \ No newline at end of file diff --git a/docs/source/TechnicalOverview.rst b/docs/source/BackgroundInfo/TechnicalOverview.rst similarity index 73% rename from docs/source/TechnicalOverview.rst rename to docs/source/BackgroundInfo/TechnicalOverview.rst index d974baff..ca1b9a3b 100644 --- a/docs/source/TechnicalOverview.rst +++ b/docs/source/BackgroundInfo/TechnicalOverview.rst @@ -27,12 +27,12 @@ The Land DA System requires: * Python * :term:`NetCDF` * Lmod - * `spack-stack `__ - * `jedi-bundle `__ (Skylab v3.0.) + * `spack-stack `__ + * `jedi-bundle `__ (Skylab v4.0) These software prerequisites are pre-installed in the Land DA :term:`container` and on other Level 1 systems (see :ref:`below ` for details). However, users on non-Level 1 systems will need to install them. -Before using the Land DA container, users will need to install `Singularity `__ and an **Intel** MPI (available `free here `__). +Before using the Land DA container, users will need to install `Singularity/Apptainer `__ and an **Intel** MPI (available `free here `__). .. _LevelsOfSupport: @@ -49,42 +49,38 @@ Four levels of support have been defined for :term:`UFS` applications, and the L Level 1 Systems ================== -Preconfigured (Level 1) systems for Land DA already have the required external libraries available in a central location via the :term:`spack-stack` Unified Environment (UE) and the ``jedi-bundle`` (Skylab v3.0). 
Land DA is expected to build and run out-of-the-box on these systems, and users can download the Land DA code without first installing prerequisite software. With the exception of the Land DA container, users must have access to these Level 1 systems in order to use them. - -+-----------+-----------------------------------+-----------------------------------------------------------------------------------+ -| Platform | Compiler/MPI | spack-stack & jedi-bundle Installations | -+===========+===================================+===================================================================================+ -| Hera | intel/2022.1.2 / | /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.3.0/envs/unified-env | -| | | | -| | impi/2022.1.2 | /scratch1/NCEPDEV/nems/role.epic/contrib/jedi-bundle | -+-----------+-----------------------------------+-----------------------------------------------------------------------------------+ -| Orion | intel/2022.1.2 / | /work/noaa/epic-ps/role-epic-ps/spack-stack/spack-stack-1.3.0/envs/unified-env | -| | | | -| | impi/2022.1.2 | /work/noaa/epic-ps/role-epic-ps/contrib/jedi-bundle | -+-----------+-----------------------------------+-----------------------------------------------------------------------------------+ -| Container | intel-oneapi-compilers/2021.8.0 / | /opt/spack-stack/ (inside the container) | -| | | | -| | intel-oneapi-mpi/2021.8.0 | /opt/jedi-bundle (inside the container) | -+-----------+-----------------------------------+-----------------------------------------------------------------------------------+ +Preconfigured (Level 1) systems for Land DA already have the required external libraries available in a central location via :term:`spack-stack` and the ``jedi-bundle`` (Skylab v4.0). Land DA is expected to build and run out-of-the-box on these systems, and users can download the Land DA code without first installing prerequisite software. 
With the exception of the Land DA container, users must have access to these Level 1 systems in order to use them. + ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Platform | Compiler/MPI | spack-stack & jedi-bundle Installations | ++===========+===================================+=================================================================+ +| Hera | intel/2022.1.2 / | /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.3.0 | +| | | | +| | impi/2022.1.2 | /scratch2/NAGAPE/epic/UFS_Land-DA/jedi/jedi-bundle | ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Orion | intel/2022.1.2 / | /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.3.0 | +| | | | +| | impi/2022.1.2 | /work/noaa/epic/UFS_Land-DA/jedi/jedi-bundle | ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Container | intel-oneapi-compilers/2021.8.0 / | /opt/spack-stack/ (inside the container) | +| | | | +| | intel-oneapi-mpi/2021.8.0 | /opt/jedi-bundle (inside the container) | ++-----------+-----------------------------------+-----------------------------------------------------------------+ Level 2-4 Systems =================== On non-Level 1 platforms, the Land DA System can be run within a container that includes the prerequisite software; otherwise, the required libraries will need to be installed as part of the Land DA build process. Once these prerequisite libraries are installed, applications and models should build and run successfully. However, users may need to perform additional troubleshooting on Level 3 or 4 systems since little or no pre-release testing has been conducted on these systems. -.. note:: - - Running on Jet, Cheyenne, and NOAA Cloud systems is supported via container. 
- Code Repositories and Directory Structure ******************************************** -Directory Structure -====================== +File & Directory Structure +============================ The main repository for the Land DA System is named ``land-DA_workflow``; it is available on GitHub at https://github.com/ufs-community/land-DA_workflow. -A number of submodules are nested under the main ``land-DA_workflow`` directory. +The ``land-DA_workflow`` repository contains a few nested submodules. When the ``develop`` branch of the ``land-DA_workflow`` repository is cloned with the ``--recursive`` argument, the basic directory structure will be similar to the example below. Some files and directories have been removed for brevity. @@ -102,7 +98,8 @@ Directories in parentheses () are only visible after the build step. ├── docs ├── modulefiles ├── test - ├── ufs-land-driver + ├── tile2tile + ├── ufs-land-driver-emc-dev │ └── ccpp-physics ├── (ufs-weather-model) ├── vector2tile @@ -129,7 +126,7 @@ the UFS Land DA System. +--------------------------+-----------------------------------------+------------------------------------------------------+ | Repository Name | Repository Description | Authoritative repository URL | +==========================+=========================================+======================================================+ - | land-DA_update | Contains scripts and components for | https://github.com/ufs-community/land-DA/ | + | DA_update | Contains scripts and components for | https://github.com/ufs-community/land-DA/ | | | performing data assimilation (DA) | | | | procedures. | | +--------------------------+-----------------------------------------+------------------------------------------------------+ @@ -137,17 +134,14 @@ the UFS Land DA System. 
| | JEDI-generated DA increment to UFS | | | | ``sfc_data`` restart | | +--------------------------+-----------------------------------------+------------------------------------------------------+ - | ufs-land-driver | Repository for the UFS Land | https://github.com/NOAA-EMC/ufs-land-driver | + | ufs-land-driver-emc-dev | Repository for the UFS Land | https://github.com/NOAA-EPIC/ufs-land-driver-emc-dev | | | Driver | | +--------------------------+-----------------------------------------+------------------------------------------------------+ - | *-- ccpp-physics* | Repository for the Common | https://github.com/NCAR/ccpp-physics | + | *-- ccpp-physics* | Repository for the Common | https://github.com/ufs-community/ccpp-physics/ | | | Community Physics Package (CCPP) | | | | | | +--------------------------+-----------------------------------------+------------------------------------------------------+ | land-vector2tile | Contains code to map between the vector | https://github.com/NOAA-PSL/land-vector2tile | - | | format used by the Noah-MP offline | | - | | driver, and the tile format used by the | | - | | UFS atmospheric model. | | +--------------------------+-----------------------------------------+------------------------------------------------------+ The UFS Land Component diff --git a/docs/source/BackgroundInfo/index.rst b/docs/source/BackgroundInfo/index.rst new file mode 100644 index 00000000..3744cd8b --- /dev/null +++ b/docs/source/BackgroundInfo/index.rst @@ -0,0 +1,10 @@ +.. _background-info-index: + +Background Information +======================== + +.. toctree:: + :maxdepth: 3 + + Introduction + TechnicalOverview diff --git a/docs/source/BuildRunLandDA.rst b/docs/source/BuildRunLandDA.rst deleted file mode 100644 index 66e991a0..00000000 --- a/docs/source/BuildRunLandDA.rst +++ /dev/null @@ -1,208 +0,0 @@ -.. 
_BuildRunLandDA: - -************************************ -Land DA Workflow (Hera & Orion) -************************************ - -This chapter provides instructions for building and running a basic Land DA case for the Unified Forecast System (:term:`UFS`) Land DA System. This out-of-the-box Land DA case builds a weather forecast for January 1, 2016 at 18z to January 3, 2016 at 18z. - -.. attention:: - - These steps are designed for use on :ref:`Level 1 ` systems (i.e., Hera and Orion) and may require significant changes on other systems. It is recommended that users on other systems run the containerized version of Land DA. Users may reference :numref:`Chapter %s: Containerized Land DA Workflow ` for instructions. - -Create a Working Directory -***************************** - -Create a directory for the Land DA experiment (``$LANDDAROOT``): - -.. code-block:: console - - mkdir /path/to/landda - cd /path/to/landda - export LANDDAROOT=`pwd` - -where ``/path/to/landda`` is the path to the directory where the user plans to run Land DA experiments. - -.. _GetData: - -Get Data -*********** - -:numref:`Table %s ` shows the locations of pre-staged data on NOAA :term:`RDHPCS` (i.e., Hera and Orion). - -.. _Level1Data: - -.. 
table:: Level 1 RDHPCS Data - - +-----------+--------------------------------------------------+ - | Platform | Data Location | - +===========+==================================================+ - | Hera | /scratch1/NCEPDEV/nems/role.epic/landda/inputs | - +-----------+--------------------------------------------------+ - | Orion | /work/noaa/epic-ps/role-epic-ps/landda/inputs | - +-----------+--------------------------------------------------+ - | Jet | /mnt/lfs4/HFIP/hfv3gfs/role.epic/landda/inputs | - +-----------+--------------------------------------------------+ - | Cheyenne | /glade/work/epicufsrt/contrib/landda/inputs | - +-----------+--------------------------------------------------+ - -Users can either set the ``LANDDA_INPUTS`` environment variable to the location of their system's pre-staged data or use a soft link to the data. For example, on Hera, users may set: - -.. code-block:: console - - export LANDDA_INPUTS=/scratch1/NCEPDEV/nems/role.epic/landda/inputs - -Alternatively, users can add a soft link to the data. For example, on Orion: - -.. code-block:: console - - cd $LANDDAROOT - ln -s /work/noaa/epic-ps/role-epic-ps/landda/inputs . - -Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s `. Users with access to data for additional experiments may use the same process described above to point or link to that data by modifying the path to the data appropriately. - -Users who are not using Land DA on Hera or Orion should view :numref:`Chapter %s ` for instructions on running the containerized version of Land DA. :numref:`Section %s ` explains options for downloading the sample data onto their system. - -Get Code -*********** - -Clone the Land DA repository. - -.. code-block:: console - - git clone -b develop --recursive https://github.com/ufs-community/land-DA_workflow.git - -Build the Land DA System -*************************** - -#. 
Navigate to the workflow directory, and source the modulefiles. - - .. code-block:: console - - cd $LANDDAROOT/land-DA_workflow - module use modulefiles - module load landda_.intel - - where ```` is either ``hera`` or ``orion``. - -#. Create and navigate to a ``build`` directory. - - .. code-block:: console - - mkdir build - cd build - -#. Build the Land DA System. - - .. code-block:: console - - ecbuild .. - make -j 8 - - If the code successfully compiles, the console output should end with: - - .. code-block:: console - - [100%] Completed 'ufs-weather-model' - [100%] Built target ufs-weather-model - - Additionally, the ``build`` directory will contain several files and a ``bin`` subdirectory with three executables: - - * ``apply_incr.exe`` - * ``ufsLandDriver.exe`` - * ``vector2tile_converter.exe`` - -Configure the Experiment -*************************** - -#. Navigate back to the ``land-DA_workflow`` directory and check that the account/partition is correct in ``submit_cycle.sh``. - - .. code-block:: console - - cd .. - vi submit_cycle.sh - - If necessary, modify lines 3 and 4 to include the correct account and queue (qos) for the system. It may also be necessary to add the following line to the script to specify the partition: - - .. code-block:: console - - #SBATCH --partition=my_partition - - where ``my_partition`` is the name of the partition on the user's system. - - -#. Configure other elements of the experiment if desired. The ``develop`` branch includes four scripts with default experiment settings: - - * ``settings_DA_cycle_gdas`` for running the Jan. 1-3, 2016 sample case. - * ``settings_DA_cycle_era5`` for running a Jan. 1-3, 2020 sample case. - * ``settings_DA_cycle_gdas_restart`` for running the Jan. 3-4, 2016 sample case. - * ``settings_DA_cycle_era5_restart`` for running a Jan. 3-4, 2020 sample case. - - These files contain reasonable default values for running a Land DA experiment. 
Users who wish to run a more complex experiment may change the values in these files and the files they reference using information in Chapters :numref:`%s ` & :numref:`%s `. - - .. note:: - - The ``*restart`` settings files will only work after an experiment with the corresponding non-restart settings file has been run. These settings files are designed to use the restart files created by the first experiment cycle to pick up where it left off. For example, ``settings_DA_cycle_gdas`` runs from 2016-01-01 at 18z to 2016-01-03 at 18z. The ``settings_DA_cycle_gdas_restart`` will run from 2016-01-03 at 18z to 2016-01-04 at 18z. - -Run an Experiment -******************** - -The Land DA System uses a script-based workflow that is launched using the ``do_submit_cycle.sh`` script. This script requires an input file that details all the specifics of a given experiment. - -.. code-block:: console - - ./do_submit_cycle.sh settings_DA_cycle_gdas - -The system will output a message such as ``Submitted batch job ########``, indicating that the job was successfully submitted. If all goes well, two full cycles will run with data assimilation (DA) and a forecast. - -.. _VerifySuccess: - -Check Progress -***************** - -Verify that the experiment ran successfully: - -To check on the job status, users on a system with a Slurm job scheduler may run: - -.. code-block:: console - - squeue -u $USER - -To view progress, users can open the ``log*`` and ``err*`` files once they have been generated: - -.. code-block:: console - - tail -f log* err* - -The ``log*`` file for a successful experiment will end with an exit code of ``0:0`` and a message like: - -.. code-block:: console - - Job 42442720 (not serial) finished for user User.Name in partition hera with exit code 0:0 - -The ``err*`` file for a successful experiment will end with something similar to: - -.. 
code-block:: console - - + THISDATE=2016010318 - + date_count=2 - + '[' 2 -lt 2 ']' - + '[' 2016010318 -lt 2016010318 ']' - -Users will need to hit ``Ctrl+C`` to exit the files. - -.. attention:: - - If the log file contains a NetCDF error (e.g., ``ModuleNotFoundError: No module named 'netCDF4'``), run: - - .. code-block:: console - - python -m pip install netCDF4 - - Then, resubmit the job (``sbatch submit_cycle.sh``). - -Next, check for the background and analysis files in the ``cycle_land`` directory. - -.. code-block:: console - - ls -l ../cycle_land/DA_GHCN_test/mem000/restarts/vector/ diff --git a/docs/source/BuildingRunningTesting/BuildRunLandDA.rst b/docs/source/BuildingRunningTesting/BuildRunLandDA.rst new file mode 100644 index 00000000..cc86606f --- /dev/null +++ b/docs/source/BuildingRunningTesting/BuildRunLandDA.rst @@ -0,0 +1,265 @@ +.. _BuildRunLandDA: + +************************************ +Land DA Workflow (Hera & Orion) +************************************ + +This chapter provides instructions for building and running basic Land DA cases for the Unified Forecast System (:term:`UFS`) Land DA System. Users can choose between two options: + + * A Dec. 21, 2019 00z sample case using ERA5 data with the UFS Land Driver (``settings_DA_cycle_era5``) + * A Jan. 3, 2000 00z sample case using GSWP3 data with the UFS Noah-MP land component (``settings_DA_cycle_gswp3``). + +.. attention:: + + These steps are designed for use on :ref:`Level 1 ` systems (i.e., Hera and Orion) and may require significant changes on other systems. It is recommended that users on other systems run the containerized version of Land DA. Users may reference :numref:`Chapter %s: Containerized Land DA Workflow ` for instructions. + +Create a Working Directory +***************************** + +Create a directory for the Land DA experiment (``$LANDDAROOT``): + +.. 
code-block:: console + + mkdir /path/to/landda + cd /path/to/landda + export LANDDAROOT=`pwd` + +where ``/path/to/landda`` is the path to the directory where the user plans to run Land DA experiments. + +.. _GetData: + +Get Data +*********** + +:numref:`Table %s ` shows the locations of pre-staged data on NOAA :term:`RDHPCS` (i.e., Hera and Orion). + +.. _Level1Data: + +.. table:: Level 1 RDHPCS Data + + +-----------+--------------------------------------------------+ + | Platform | Data Location | + +===========+==================================================+ + | Hera | /scratch2/NAGAPE/epic/UFS_Land-DA/inputs | + +-----------+--------------------------------------------------+ + | Orion | /work/noaa/epic/UFS_Land-DA/inputs | + +-----------+--------------------------------------------------+ + +Users can either set the ``LANDDA_INPUTS`` environment variable to the location of their system's pre-staged data or use a soft link to the data. For example, on Hera, users may set: + +.. code-block:: console + + export LANDDA_INPUTS=/scratch2/NAGAPE/epic/UFS_Land-DA/inputs + +Alternatively, users can add a soft link to the data. For example, on Orion: + +.. code-block:: console + + cd $LANDDAROOT + ln -fs /work/noaa/epic/UFS_Land-DA/inputs + +Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s `. Users with access to data for additional experiments may use the same process described above to point or link to that data by modifying the path to the data appropriately. + +Users who are not using Land DA on Hera or Orion should view :numref:`Chapter %s ` for instructions on running the containerized version of Land DA. :numref:`Section %s ` explains options for downloading the sample data onto their system. + +Get Code +*********** + +Clone the Land DA repository. To clone the ``develop`` branch, run: + +.. 
code-block:: console + + git clone -b develop --recursive https://github.com/ufs-community/land-DA_workflow.git + +To clone the most recent release, run the same command with |tag| in place of ``develop``: + +.. code-block:: console + + git clone -b release/public-v1.2.0 --recursive https://github.com/ufs-community/land-DA_workflow.git + +.. _build-land-da: + +Build the Land DA System +*************************** + +#. Navigate to the workflow directory, and source the modulefiles. + + .. code-block:: console + + cd $LANDDAROOT/land-DA_workflow + module use modulefiles + module load landda_<machine>.intel + + where ``<machine>`` is either ``hera`` or ``orion``. + +#. Create and navigate to a ``build`` directory. + + .. code-block:: console + + mkdir build + cd build + +#. Build the Land DA System. + + .. code-block:: console + + ecbuild .. + make -j4 + + If the code successfully compiles, the console output should end with: + + .. code-block:: console + + [100%] Completed 'ufs-weather-model' + [100%] Built target ufs-weather-model + + Additionally, the ``build`` directory will contain several files and directories along with a ``bin`` subdirectory with four executables: + + * ``apply_incr.exe`` + * ``ufsLand.exe`` + * ``vector2tile_converter.exe`` + * ``tile2tile_converter.exe`` + + +Configure the Experiment +*************************** + +The ``develop`` branch includes two scripts with default experiment settings: + + * ``settings_DA_cycle_era5`` for running a Dec. 21, 2019 00z sample case with the UFS Land Driver. + * ``settings_DA_cycle_gswp3`` for running a Jan. 3, 2000 00z sample case with the UFS Noah-MP land component. + +To configure an experiment: + +#. Navigate back to the ``land-DA_workflow`` directory and check that the account, queue, and partition are correct in ``submit_cycle.sh``. + + .. code-block:: console + + cd .. + vi submit_cycle.sh + + If necessary, modify lines 3 and 4 to include the correct account and queue (qos) for the system. 
It may also be necessary to add the following line to the script to specify the partition: + + .. code-block:: console + + #SBATCH --partition=my_partition + + where ``my_partition`` is the name of the partition on the user's system. + + When using the GSWP3 forcing option, users will need to update line 7 to say ``#SBATCH --cpus-per-task=4``. Users can perform this change manually in a code editor or run: + + .. code-block:: console + + sed -i 's/--cpus-per-task=1/--cpus-per-task=4/g' submit_cycle.sh + + +#. When using GSWP3 forcing option, users may also have to alter ``MACHINE_ID`` in line 8 of ``settings_DA_cycle_gswp3``. The default value is ``hera``, but ``orion`` is another option: + + .. code-block:: console + + export MACHINE_ID=orion + + Users running the ERA5 case do not need to make this change. + +#. Configure other elements of the experiment if desired. The ``settings_*`` files contain reasonable default values for running a Land DA experiment. Users who wish to run a more complex experiment may change the values in these files and the files they reference using information in Sections :numref:`%s ` & :numref:`%s `. + +Run an Experiment +******************** + +The Land DA System uses a script-based workflow that is launched using the ``do_submit_cycle.sh`` script. This script requires a ``settings_DA_cycle_*`` input file that details all the specifics of a given experiment. For example, to run the ERA5 case, users would run: + +.. code-block:: console + + ./do_submit_cycle.sh settings_DA_cycle_era5 + +Users can replace ``settings_DA_cycle_era5`` with a different settings file to run a different default experiment. Regardless of the file selected, the system will output a message such as ``Submitted batch job ########``, indicating that the job was successfully submitted. If all goes well, one full cycle will run with data assimilation (DA) and a forecast. + +.. 
_VerifySuccess: + +Check Progress +***************** + +To check on the experiment status, users on a system with a Slurm job scheduler may run: + +.. code-block:: console + + squeue -u $USER + +To view progress, users can open the ``log*`` and ``err*`` files once they have been generated: + +.. code-block:: console + + tail -f log* err* + +Users will need to type ``Ctrl+C`` to exit the files. For examples of what the log and error files should look like in a successful experiment, reference :ref:`ERA5 Experiment Logs <era5-log-output>` or :ref:`GSWP3 Experiment Logs <gswp3-log-output>` below. + +.. attention:: + + If the log file contains a NetCDF error (e.g., ``ModuleNotFoundError: No module named 'netCDF4'``), run: + + .. code-block:: console + + python -m pip install netCDF4 + + Then, resubmit the job (``sbatch submit_cycle.sh``). + +Next, check for the background and analysis files in the test directory. + +.. code-block:: console + + ls -l ../landda_expts/DA_<forcing>_test/mem000/restarts/<restart_dir>/ + +where: + + * ``<forcing>`` is either ``era5`` or ``gswp3``, and + * ``<restart_dir>`` is either ``vector`` or ``tile`` depending on whether ERA5 or GSWP3 forcing data was used, respectively. + +The experiment should generate several files. + +.. _era5-log-output: + +ERA5 Experiment Logs +===================== + +For the ERA5 experiment, the ``log*`` file for a successful experiment will end with a message like: + +.. code-block:: console + + Creating: .//ufs_land_restart.2019-12-22_00-00-00.nc + Searching for forcing at time: 2019-12-22 01:00:00 + +The ``err*`` file for a successful experiment will end with something similar to: + +.. code-block:: console + + + THISDATE=2019122200 + + date_count=1 + + '[' 1 -lt 1 ']' + + '[' 2019122200 -lt 2019122200 ']' + +.. _gswp3-log-output: + +GSWP3 Experiment Logs +======================= + +For the GSWP3 experiment, the ``log*`` file for a successful experiment will end with a list of resource statistics. For example: + +.. 
code-block:: console + + Number of times filesystem performed OUTPUT = 250544 + Number of Voluntary Context Switches = 3252 + Number of InVoluntary Context Switches = 183 + *****************END OF RESOURCE STATISTICS************************* + +The ``err*`` file for a successful experiment will end with something similar to: + +.. code-block:: console + + + echo 'do_landDA: calling apply snow increment' + + [[ '' =~ hera\.internal ]] + + /apps/intel-2022.1.2/intel-2022.1.2/mpi/2021.5.1/bin/mpiexec -n 6 /path/to/land-DA_workflow/build/bin/apply_incr.exe /path/to/landda_expts/DA_GSWP3_test/DA/logs//apply_incr.log + + [[ 0 != 0 ]] + + '[' YES == YES ']' + + '[' YES == YES ']' + + cp /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile1.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile2.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile3.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile4.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile5.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile6.nc /path/to/landda_expts/DA_GSWP3_test/DA/jedi_incr/ + + [[ YES == \N\O ]] diff --git a/docs/source/Container.rst b/docs/source/BuildingRunningTesting/Container.rst similarity index 67% rename from docs/source/Container.rst rename to docs/source/BuildingRunningTesting/Container.rst index 7cb7e1ee..8c5c7fa9 100644 --- a/docs/source/Container.rst +++ b/docs/source/BuildingRunningTesting/Container.rst @@ -4,9 +4,12 @@ Containerized Land DA Workflow ********************************** -These instructions will help users build and run a basic case for the Unified Forecast System (:term:`UFS`) Land Data Assimilation (DA) System using a `Singularity `__ container. The Land DA :term:`container` packages together the Land DA System with its dependencies (e.g., :term:`spack-stack`, :term:`JEDI`) and provides a uniform environment in which to build and run the Land DA System. 
Normally, the details of building and running Earth systems models will vary based on the computing platform because there are many possible combinations of operating systems, compilers, :term:`MPIs `, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother experience building and running Land DA. This approach is recommended for users not running Land DA on a supported :ref:`Level 1 ` system (i.e., Hera, Orion). +These instructions will help users build and run a basic case for the Unified Forecast System (:term:`UFS`) Land Data Assimilation (DA) System using a `Singularity/Apptainer `__ container. The Land DA :term:`container` packages together the Land DA System with its dependencies (e.g., :term:`spack-stack`, :term:`JEDI`) and provides a uniform environment in which to build and run the Land DA System. Normally, the details of building and running Earth systems models will vary based on the computing platform because there are many possible combinations of operating systems, compilers, :term:`MPIs `, and package versions available. Installation via Singularity/Apptainer container reduces this variability and allows for a smoother experience building and running Land DA. This approach is recommended for users not running Land DA on a supported :ref:`Level 1 ` system (i.e., Hera, Orion). -The out-of-the-box Land DA case described in this User's Guide builds a weather forecast for January 1, 2016 at 18z to January 3, 2016 at 18z. +This chapter provides instructions for building and running basic Land DA cases for the Unified Forecast System (:term:`UFS`) Land DA System. Users can choose between two options: + + * A Dec. 21, 2019 00z sample case using :term:`ERA5` data with the UFS Land Driver (``settings_DA_cycle_era5``) + * A Jan. 3, 2000 00z sample case using :term:`GSWP3` data with the UFS Noah-MP land component (``settings_DA_cycle_gswp3``). .. 
attention:: @@ -19,17 +22,21 @@ Prerequisites The containerized version of Land DA requires: - * `Installation of Singularity `__ + * `Installation of Apptainer `__ * At least 6 CPU cores * An **Intel** compiler and :term:`MPI` (available for free `here `__) -Install Singularity -====================== +Install Singularity/Apptainer +=============================== -To build and run Land DA using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `__. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. +.. note:: -.. note:: + As of November 2021, the Linux-supported version of Singularity has been `renamed `__ to *Apptainer*. Apptainer has maintained compatibility with Singularity, so ``singularity`` commands should work with either Singularity or Apptainer (see compatibility details `here `__.) + +To build and run Land DA using a Singularity/Apptainer container, first install the software according to the `Apptainer Installation Guide `__. This will include the installation of all dependencies. + +.. attention:: Docker containers can only be run with root privileges, and users generally do not have root privileges on :term:`HPCs `. However, a Singularity image may be built directly from a Docker image for use on the system. .. _DownloadContainer: @@ -74,19 +81,19 @@ Set a top-level directory location for Land DA work, and navigate to it. For exa .. code-block:: console - export LANDDAROOT=/path/to/landda - [[ -d $LANDDAROOT ]] || mkdir -p $LANDDAROOT - cd $LANDDAROOT + mkdir /path/to/landda + cd /path/to/landda + export LANDDAROOT=`pwd` -where ``/path/to/landda`` is the path to this top-level directory (e.g., ``/Users/Joe.Schmoe/landda``). The second line will create the directory if it does not exist yet. 
+where ``/path/to/landda`` is the path to this top-level directory (e.g., ``/Users/Joe.Schmoe/landda``). .. hint:: - If a ``singularity: command not found`` error message appears in any of the following steps, try running: ``module load singularity``. + If a ``singularity: command not found`` error message appears in any of the following steps, try running: ``module load singularity`` or (on Derecho) ``module load apptainer``. NOAA RDHPCS Systems ---------------------- -On many NOAA :term:`RDHPCS` systems, a container named ``ubuntu20.04-intel-ue-landda.img`` has already been built, and users may access the container at the locations in :numref:`Table %s `. +On many NOAA :term:`RDHPCS` systems, a container named ``ubuntu20.04-intel-landda-release-public-v1.2.0.img`` has already been built, and users may access the container at the locations in :numref:`Table %s `. .. _PreBuiltContainers: @@ -95,46 +102,45 @@ On many NOAA :term:`RDHPCS` systems, a container named ``ubuntu20.04-intel-ue-la +--------------+--------------------------------------------------------+ | Machine | File location | +==============+========================================================+ - | Cheyenne | /glade/scratch/epicufsrt/containers | + | Derecho | /glade/work/epicufsrt/contrib/containers | + +--------------+--------------------------------------------------------+ + | Gaea | /lustre/f2/dev/role.epic/contrib/containers | +--------------+--------------------------------------------------------+ | Hera | /scratch1/NCEPDEV/nems/role.epic/containers | +--------------+--------------------------------------------------------+ | Jet | /mnt/lfs4/HFIP/hfv3gfs/role.epic/containers | +--------------+--------------------------------------------------------+ - | Orion | /work/noaa/epic-ps/role-epic-ps/containers | + | Orion | /work/noaa/epic/role-epic/contrib/containers | +--------------+--------------------------------------------------------+ -.. 
note:: - Singularity is not available on Gaea, and therefore, container use is not supported on Gaea. - Users can simply set an environment variable to point to the container: .. code-block:: console - export img=path/to/ubuntu20.04-intel-ue-landda.img + export img=path/to/ubuntu20.04-intel-landda-release-public-v1.2.0.img If users prefer, they may copy the container to their local working directory. For example, on Jet: .. code-block:: console - cp /mnt/lfs4/HFIP/hfv3gfs/role.epic/containers/ubuntu20.04-intel-ue-landda.img . + cp /mnt/lfs4/HFIP/hfv3gfs/role.epic/containers/ubuntu20.04-intel-landda-release-public-v1.2.0.img . Other Systems ---------------- -On other systems, users can build the Singularity container from a public Docker :term:`container` image or download the ``ubuntu20.04-intel-landda.img`` container from the `Land DA Data Bucket `__. Downloading may be faster depending on the download speed on the user's system. However, the container in the data bucket is the ``release/v1.0.0`` container rather than the updated ``develop`` branch container. +On other systems, users can build the Singularity container from a public Docker :term:`container` image or download the ``ubuntu20.04-intel-landda-release-public-v1.2.0.img`` container from the `Land DA Data Bucket `__. Downloading may be faster depending on the download speed on the user's system. However, the container in the data bucket is the ``release/v1.2.0`` container rather than the updated ``develop`` branch container. To download from the data bucket, users can run: .. code-block:: console - wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/ubuntu20.04-intel-landda.img + wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/v1.2.0/ubuntu20.04-intel-landda-release-public-v1.2.0.img To build the container from a Docker image, users can run: .. 
code-block:: console - singularity build --force ubuntu20.04-intel-ue-landda.img docker://noaaepic/ubuntu20.04-intel-ue-landda:unified-dev-testmp + singularity build --force ubuntu20.04-intel-landda-release-public-v1.2.0.img docker://noaaepic/ubuntu20.04-intel-landda:release-public-v1.2.0 This process may take several hours depending on the system. @@ -147,16 +153,23 @@ This process may take several hours depending on the system. Get Data *********** -In order to run the Land DA System, users will need input data in the form of fix files, model forcing files, restart files, and observations for data assimilation. These files are already present on NOAA RDHPCS systems (see :numref:`Section %s ` for details). +In order to run the Land DA System, users will need input data in the form of fix files, model forcing files, restart files, and observations for data assimilation. These files are already present on Level 1 systems (see :numref:`Section %s ` for details). Users on any system may download and untar the data from the `Land DA Data Bucket `__ into their ``$LANDDAROOT`` directory. .. code-block:: console - wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/landda-input-data-{YEAR}.tar.gz - tar xvfz landda-input-data-{YEAR}.tar.gz + cd $LANDDAROOT + wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/v1.2.0/Landdav1.2.0_input_data.tar.gz + tar xvfz Landdav1.2.0_input_data.tar.gz + +If users choose to add data in a location other than ``$LANDDAROOT``, they can set the input data directory by running: + +.. code-block:: console + + export LANDDA_INPUTS=/path/to/input/data -replacing ``{YEAR}`` with either ``2016`` or ``2020``. The default name for the untarred file is ``inputs``. +where ``/path/to/input/data`` is replaced by the absolute path to the location of their Land DA input data. .. _RunContainer: @@ -178,7 +191,7 @@ Save the location of the container in an environment variable. .. 
code-block:: console - export img=path/to/ubuntu20.04-intel-ue-landda.img + export img=path/to/ubuntu20.04-intel-landda-release-public-v1.2.0.img Set the ``USE_SINGULARITY`` environment variable to "yes". @@ -188,18 +201,18 @@ Set the ``USE_SINGULARITY`` environment variable to "yes". This variable tells the workflow to use the containerized version of all the executables (including python) when running a cycle. -Users may convert a container ``.img`` file to a writable sandbox. This step is required when running on Cheyenne but is optional on most other systems: +Users may convert a container ``.img`` file to a writable sandbox. This step is optional on most systems: .. code-block:: console - singularity build --sandbox ubuntu20.04-intel-ue-landda $img + singularity build --sandbox ubuntu20.04-intel-landda-release-public-v1.2.0 $img When making a writable sandbox on NOAA RDHPCS systems, the following warnings commonly appear and can be ignored: .. code-block:: console INFO: Starting build... - INFO: Verifying bootstrap image ubuntu20.04-intel-ue-landda.img + INFO: Verifying bootstrap image ubuntu20.04-intel-landda-release-public-v1.2.0.img WARNING: integrity: signature not found for object group 1 WARNING: Bootstrap image could not be verified, but build will continue. @@ -252,20 +265,31 @@ When using a Singularity container, Intel compilers and Intel :term:`MPI` (prefe Configure the Experiment =========================== +Modify Machine Settings +------------------------ + Users on a system with a Slurm job scheduler will need to make some minor changes to the ``submit_cycle.sh`` file. Open the file and change the account and queue (qos) to match the desired account and qos on the system. Users may also need to add the following line to the script to specify the partition. For example, on Jet, users should set: .. 
code-block:: console #SBATCH --partition=xjet +When using the GSWP3 forcing option, users will need to update line 7 to say ``#SBATCH --cpus-per-task=4``. Users can perform this change manually in a code editor or run: + +.. code-block:: console + + sed -i 's/--cpus-per-task=1/--cpus-per-task=4/g' submit_cycle.sh + Save and close the file. -.. _RunExptC: +Modify Experiment Settings +--------------------------- -Run the Experiment -===================== +The Land DA System uses a script-based workflow that is launched using the ``do_submit_cycle.sh`` script. That script requires an input file that details all the specifics of a given experiment. EPIC has provided two sample ``settings_*`` files as examples: ``settings_DA_cycle_era5`` and ``settings_DA_cycle_gswp3``. -The Land DA System uses a script-based workflow that is launched using the ``do_submit_cycle.sh`` script. That script requires an input file that details all the specifics of a given experiment. EPIC has provided four sample ``settings_*`` files as examples: ``settings_DA_cycle_gdas``, ``settings_DA_cycle_era5``, ``settings_DA_cycle_gdas_restart``, and ``settings_DA_cycle_era5_restart``. The ``*restart`` settings files will only work after an experiment with the corresponding non-restart settings file has been run. This is because they are designed to use the restart files created by the first experiment cycle to pick up where it left off. (e.g., ``settings_DA_cycle_gdas`` runs from 2016-01-01 at 18z to 2016-01-03 at 18z. The ``settings_DA_cycle_gdas_restart`` will run from 2016-01-03 at 18z to 2016-01-04 at 18z.) +.. attention:: + + Note that the GSWP3 option will only run as-is on Hera and Orion. Users on other systems may need to make significant changes to configuration files, which is not a supported option for the |latestr| release. It is recommended that users on these systems use the UFS land driver ERA5 sample experiment set in ``settings_DA_cycle_era5``. 
First, update the ``$BASELINE`` environment variable in the selected ``settings_DA_*`` file to say ``singularity.internal`` instead of ``hera.internal``: @@ -273,11 +297,18 @@ First, update the ``$BASELINE`` environment variable in the selected ``settings_ export BASELINE=singularity.internal +When using the GSWP3 forcing option, users must also update the ``MACHINE_ID`` to ``orion`` in ``settings_DA_cycle_gswp3`` if running on Orion. + +.. _RunExptC: + +Run the Experiment +===================== + To start the experiment, run: .. code-block:: console - ./do_submit_cycle.sh settings_DA_cycle_gdas + ./do_submit_cycle.sh settings_DA_cycle_era5 The ``do_submit_cycle.sh`` script will read the ``settings_DA_cycle_*`` file and the ``release.environment`` file, which contain sensible experiment default values to simplify the process of running the workflow for the first time. Advanced users will wish to modify the parameters in ``do_submit_cycle.sh`` to fit their particular needs. After reading the defaults and other variables from the settings files, ``do_submit_cycle.sh`` creates a working directory (named ``workdir`` by default) and an output directory called ``landda_expts`` in the parent directory of ``land-DA_workflow`` and then submits a job (``submit_cycle.sh``) to the queue that will run through the workflow. If all succeeds, users will see ``log`` and ``err`` files created in ``land-DA_workflow`` along with a ``cycle.log`` file, which will show where the cycle has ended. The ``landda_expts`` directory will also be populated with data in the following directories: @@ -285,5 +316,8 @@ The ``do_submit_cycle.sh`` script will read the ``settings_DA_cycle_*`` file and landda_expts/DA_GHCN_test/DA/ landda_expts/DA_GHCN_test/mem000/restarts/vector/ + landda_expts/DA_GHCN_test/mem000/restarts/tile/ + +Depending on the experiment, either the ``vector`` or the ``tile`` directory will have data, but not both. 
Users can check experiment progress/success according to the instructions in :numref:`Section %s <VerifySuccess>`, which apply to both containerized and non-containerized versions of the Land DA System. \ No newline at end of file diff --git a/docs/source/BuildingRunningTesting/TestingLandDA.rst b/docs/source/BuildingRunningTesting/TestingLandDA.rst new file mode 100644 index 00000000..986a2245 --- /dev/null +++ b/docs/source/BuildingRunningTesting/TestingLandDA.rst @@ -0,0 +1,52 @@ +.. _TestingLandDA: + +************************************ +Testing the Land DA Workflow +************************************ + +This chapter provides instructions for using the Land DA CTest suite. These steps are designed for use on :ref:`Level 1 ` systems (i.e., Hera and Orion) and may require significant changes on other systems. + +.. attention:: + + This chapter assumes that the user has already built the Land DA System according to the instructions in :numref:`Section %s <build-land-da>` and has access to the data provided in the most recent release. (See :numref:`Table %s <Level1Data>` for the locations of pre-staged data on NOAA :term:`RDHPCS` [i.e., Hera and Orion].) + +Process +********* + +From the working directory (``$LANDDAROOT``), navigate to ``build``. Then run: + +.. code-block:: console + + salloc --ntasks 8 --exclusive --qos=debug --partition=debug --time=00:30:00 --account=<account> + module use modulefiles && module load landda_<machine>.intel + ctest + +where ``<account>`` corresponds to the user's actual account name and ``<machine>`` is ``hera`` or ``orion``. + +This will allocate a compute node, load the appropriate modulefiles, and run the CTests. + +Tests +******* + +The ERA5 CTests test the operability of seven major elements of the Land DA System: ``vector2tile``, ``create_ens``, ``letkfoi_snowda``, ``apply_jediincr``, ``tile2vector``, ``land_driver``, and ``ufs_datm_land``. The tests and their dependencies are listed in the ``land-DA_workflow/test/CMakeLists.txt`` file. 
Currently, the CTests are only run on Hera and Orion; they cannot yet be run via container. + +.. list-table:: *Land DA CTests* + :widths: 20 50 + :header-rows: 1 + + * - Test + - Description + * - ``test_vector2tile`` + - Tests the vector-to-tile function for use in JEDI + * - ``test_create_ens`` + - Tests creation of a pseudo-ensemble for use in LETKF-OI. + * - ``test_letkfoi_snowda`` + - Tests the use of LETKF-OI to assimilate snow DA. + * - ``test_apply_jediincr`` + - Tests the ability to add a JEDI increment. + * - ``test_tile2vector`` + - Tests the tile-to-vector function for use in ``ufs-land-driver`` + * - ``test_land_driver`` + - Tests proper functioning of ``ufs-land-driver`` + * - ``test_ufs_datm_land`` + - Tests proper functioning of the UFS land model (``ufs-datm-lnd``) diff --git a/docs/source/BuildingRunningTesting/index.rst b/docs/source/BuildingRunningTesting/index.rst new file mode 100644 index 00000000..87558e29 --- /dev/null +++ b/docs/source/BuildingRunningTesting/index.rst @@ -0,0 +1,11 @@ +.. _build-run-test-index: + +Building, Running, and Testing the Land DA System +=================================================== + +.. toctree:: + :maxdepth: 3 + + BuildRunLandDA + Container + TestingLandDA diff --git a/docs/source/DASystem.rst b/docs/source/CustomizingTheWorkflow/DASystem.rst similarity index 80% rename from docs/source/DASystem.rst rename to docs/source/CustomizingTheWorkflow/DASystem.rst index 068d7f5b..a7d4053c 100644 --- a/docs/source/DASystem.rst +++ b/docs/source/CustomizingTheWorkflow/DASystem.rst @@ -4,71 +4,75 @@ Land Data Assimilation System *************************************************** -This chapter describes the configuration of the offline Land :term:`Data Assimilation` (DA) System, which utilizes the UFS Noah-MP component together with JEDI's ``jedi-bundle`` (Skylab v3.0) to enable cycled model forecasts. 
The data assimilation framework applies the Local Ensemble Transform Kalman Filter-Optimal Interpolation (LETKF-OI) algorithm to combine the state-dependent background error derived from an ensemble forecast with the observations and their corresponding uncertainties to produce an analysis ensemble (:cite:t:`HuntEtAl2007`, 2007). +This chapter describes the configuration of the offline Land :term:`Data Assimilation` (DA) System, which utilizes the UFS Noah-MP component together with the ``jedi-bundle`` (Skylab v4.0) to enable cycled model forecasts. The data assimilation framework applies the Local Ensemble Transform Kalman Filter-Optimal Interpolation (LETKF-OI) algorithm to combine the state-dependent background error derived from an ensemble forecast with the observations and their corresponding uncertainties to produce an analysis ensemble (:cite:t:`HuntEtAl2007`, 2007). Joint Effort for Data Assimilation Integration (JEDI) ******************************************************** +.. attention:: + + Users are encouraged to visit the `JEDI Documentation `__. Much of the information in this chapter is drawn directly from there with modifications to clarify JEDI's use specifically in the context of the Land DA System. + The Joint Effort for Data assimilation Integration (:term:`JEDI`) is a unified and versatile :term:`data assimilation` (DA) system for Earth System Prediction that can be run on a variety of platforms. JEDI is developed by the Joint Center for Satellite Data Assimilation (`JCSDA `__) and partner agencies, including NOAA. The core feature of JEDI is separation of concerns. The data assimilation update, observation selection and processing, and observation operators are all coded with no knowledge of or dependency on each other or on the forecast model. 
The NOAH-MP offline Land DA System uses three JEDI components: - * The Object-Oriented Prediction System (`OOPS `__) for the data assimilation algorithm - * The Interface for Observation Data Access (`IODA `__) for the observation formatting and processing - * The Unified Forward Operator (`UFO `__) for comparing model forecasts and observations + * The Object-Oriented Prediction System (:ref:`OOPS `) for the data assimilation algorithm + * The Interface for Observation Data Access (`IODA `__) for the observation formatting and processing + * The Unified Forward Operator (`UFO `__) for comparing model forecasts and observations JEDI's Unified Forward Operator (UFO) links observation operators with the Object Oriented Prediction System (OOPS) to compute a simulated observation given a known model state. It does not restrict observation operators based on model-specific code structures or requirements. The UFO code structure provides generic classes for observation bias correction and quality control. Within this system, IODA converts the observation data into model-specific formats to be ingested by each model's data assimilation system. This involves model-specific data conversion efforts. Object-Oriented Prediction System (OOPS) =========================================== -A data assimilation experiment requires a ``yaml`` configuration file that specifies the details of the data assimilation and observation processing. OOPS provides the core set of data assimilation algorithms in JEDI by combining the generic building blocks required for the algorithms. The OOPS system does not require knowledge of any specific application model implementation structure or observation data information. In the Noah-MP offline Land DA System, OOPS reads the model forecast states from the restart files generated by the Noah-MP model. 
JEDI UFO contains generic quality control options and filters that can be applied to each observation system, without coding at certain model application levels. More information on the key concepts of the JEDI software design can be found in :cite:t:`Tremolet&Auligne2020` (2020), :cite:t:`HoldawayEtAl2020` (2020), and :cite:t:`HoneyagerEtAl2020` (2020). +A data assimilation experiment requires a ``.yaml`` configuration file that specifies the details of the data assimilation and observation processing. OOPS provides the core set of data assimilation algorithms in JEDI by combining the generic building blocks required for the algorithms. The OOPS system does not require knowledge of any specific application model implementation structure or observation data information. In the Noah-MP offline Land DA System, OOPS reads the model forecast states from the restart files generated by the Noah-MP model. JEDI UFO contains generic quality control options and filters that can be applied to each observation system, without coding at certain model application levels. More information on the key concepts of the JEDI software design can be found in :cite:t:`Tremolet&Auligne2020` (2020), :cite:t:`HoldawayEtAl2020` (2020), and :cite:t:`HoneyagerEtAl2020` (2020). JEDI Configuration Files & Parameters ---------------------------------------- -To create the DA experiment, the user should create or modify an experiment-specific configuration ``yaml`` file. This ``yaml`` file should contain certain fundamental components: geometry, window begin, window length, background, driver, local ensemble DA, output increment, and observations. These components can be implemented differently for different models and observation types, so they frequently contain distinct parameters and variable names depending on the use case. 
Therefore, this section of the User's Guide focuses on assisting users with understanding and customizing these top-level configuration items in order to run Land DA experiments. Users may also reference the `JEDI Documentation `__ for additional information. +To create the DA experiment, the user should create or modify an experiment-specific configuration ``.yaml`` file. This ``.yaml`` file should contain certain fundamental components: geometry, window begin, window length, background, driver, local ensemble DA, output increment, and observations. These components can be implemented differently for different models and observation types, so they frequently contain distinct parameters and variable names depending on the use case. Therefore, this section of the User's Guide focuses on assisting users with understanding and customizing these top-level configuration items in order to run Land DA experiments. Users may also reference the `JEDI Documentation `__ for additional information. -Users may find the following example ``yaml`` configuration file to be a helpful starting point. This file (with user-appropriate modifications) is required by JEDI for snow data assimilation. The following subsections will explain the variables within each top-level item of the ``yaml`` file. +Users may find the following example ``GHCN.yaml`` configuration file to be a helpful starting point. A similar file (with user-appropriate modifications) is required by JEDI for snow data assimilation. The following subsections will explain the variables within each top-level item of the ``.yaml`` file. The ``GHCN.yaml`` file for the |latestr| release can be found within the cloned repository at ``DA_update/jedi/fv3-jedi/yaml_files/psl_develop/GHCN.yaml``. -.. code-block:: console +.. 
code-block:: yaml geometry: fms initialization: namelist filename: Data/fv3files/fmsmpp.nml field table filename: Data/fv3files/field_table - akbk: Data/fv3files/akbk127.nc4 - npx: 97 - npy: 97 - npz: 127 + akbk: Data/fv3files/akbk64.nc4 + npx: 49 + npy: 49 + npz: 64 field metadata override: Data/fieldmetadata/gfs-land.yaml time invariant fields: state fields: - datetime: 2016-01-02T18:00:00Z + datetime: 2019-12-21T00:00:00Z filetype: fms restart skip coupler file: true state variables: [orog_filt] - datapath: /mnt/lfs4/HFIP/hfv3gfs/role.epic/landda/inputs/forcing/gdas/orog_files + datapath: /scratch2/NAGAPE/epic/UFS_Land-DA/inputs/forcing/era5/orog_files filename_orog: oro_C96.mx100.nc - window begin: 2016-01-02T12:00:00Z - window length: PT6H + window begin: 2019-12-21T00:00:00Z + window length: PT24H background: - date: &date 2016-01-02T18:00:00Z + date: &date 2019-12-21T00:00:00Z members: - - datetime: 2016-01-02T18:00:00Z + - datetime: 2019-12-21T00:00:00Z filetype: fms restart state variables: [snwdph,vtype,slmsk] datapath: mem_pos/ - filename_sfcd: 20160102.180000.sfc_data.nc - filename_cplr: 20160102.180000.coupler.res - - datetime: 2016-01-02T18:00:00Z + filename_sfcd: 20191221.000000.sfc_data.nc + filename_cplr: 20191221.000000.coupler.res + - datetime: 2019-12-21T00:00:00Z filetype: fms restart state variables: [snwdph,vtype,slmsk] datapath: mem_neg/ - filename_sfcd: 20160102.180000.sfc_data.nc - filename_cplr: 20160102.180000.coupler.res + filename_sfcd: 20191221.000000.sfc_data.nc + filename_cplr: 20191221.000000.coupler.res driver: save posterior mean: false @@ -90,7 +94,7 @@ Users may find the following example ``yaml`` configuration file to be a helpful observations: observers: - obs space: - name: Simulate + name: SnowDepthGHCN distribution: name: Halo halo size: 250e3 @@ -98,11 +102,11 @@ Users may find the following example ``yaml`` configuration file to be a helpful obsdatain: engine: type: H5File - obsfile: GHCN_2016010218.nc + obsfile: 
GHCN_2019122100.nc obsdataout: engine: type: H5File - obsfile: output/DA/hofx/letkf_hofx_ghcn_2016010218.nc + obsfile: output/DA/hofx/letkf_hofx_ghcn_2019122100.nc obs operator: name: Identity obs error: @@ -122,12 +126,12 @@ Users may find the following example ``yaml`` configuration file to be a helpful - filter: Domain Check # missing station elevation (-999.9) where: - variable: - name: height@MetaData + name: MetaData/height minvalue: -999.0 - filter: Domain Check # land only where: - variable: - name: slmsk@GeoVaLs + name: GeoVaLs/slmsk minvalue: 0.5 maxvalue: 1.5 # GFSv17 only. @@ -139,7 +143,7 @@ Users may find the following example ``yaml`` configuration file to be a helpful - filter: RejectList # no land-ice where: - variable: - name: vtype@GeoVaLs + name: GeoVaLs/vtype minvalue: 14.5 maxvalue: 15.5 - filter: Background Check # gross error check @@ -151,7 +155,7 @@ Users may find the following example ``yaml`` configuration file to be a helpful .. note:: - Any default values indicated in the sections below are the defaults set in ``letkfoi_snow.yaml`` or ``GHCN.yaml`` (found within the ``land-offline_workflow/DA_update/jedi/fv3-jedi/yaml_files/release-v1.0/`` directory). + Any default values indicated in the sections below are the defaults set in ``letkfoi_snow.yaml`` or ``GHCN.yaml`` (found within the ``DA_update/jedi/fv3-jedi/yaml_files/psl_develop`` directory). Geometry ^^^^^^^^^^^ @@ -228,7 +232,7 @@ Background The ``background:`` section includes information on the analysis file(s) (also known as "members") generated by the previous cycle. ``date`` - Specifies the background date. The format is ``&date YYYY-MM-DDTHH:00:00Z``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour. For example: ``&date 2016-01-02T18:00:00Z`` + Specifies the background date. 
The format is ``&date YYYY-MM-DDTHH:00:00Z``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour. For example: ``&date 2019-12-21T00:00:00Z`` ``members`` Specifies information on analysis file(s) generated by a previous cycle. @@ -246,15 +250,15 @@ The ``background:`` section includes information on the analysis file(s) (also k Specifies the path for state variables data. Valid values: ``mem_pos/`` | ``mem_neg/``. (With default experiment values, the full path will be ``workdir/mem000/jedi/$datapath``.) ``filename_sfcd`` - Specifies the name of the surface data file. This usually takes the form ``YYYYMMDD.HHmmss.sfc_data.nc``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour, mm is a valid 2-digit minute and ss is a valid 2-digit second. For example: ``20160102.180000.sfc_data.nc`` + Specifies the name of the surface data file. This usually takes the form ``YYYYMMDD.HHmmss.sfc_data.nc``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour, mm is a valid 2-digit minute and ss is a valid 2-digit second. For example: ``20191221.000000.sfc_data.nc`` ``filename_cprl`` - Specifies the name of file that contains metadata for the restart. This usually takes the form ``YYYYMMDD.HHmmss.coupler.res``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour, mm is a valid 2-digit minute and ss is a valid 2-digit second. For example: ``20160102.180000.coupler.res`` + Specifies the name of file that contains metadata for the restart. This usually takes the form ``YYYYMMDD.HHmmss.coupler.res``, where YYYY is a 4-digit year, MM is a valid 2-digit month, DD is a valid 2-digit day, and HH is a valid 2-digit hour, mm is a valid 2-digit minute and ss is a valid 2-digit second. 
For example: ``20191221.000000.coupler.res`` Driver ^^^^^^^^^ -The ``driver:`` section describes optional modifications to the behavior of the LocalEnsembleDA driver. For details, refer to `Local Ensemble Data Assimilation in OOPS `__ in the JEDI Documentation. +The ``driver:`` section describes optional modifications to the behavior of the LocalEnsembleDA driver. For details, refer to :ref:`Local Ensemble Data Assimilation in OOPS ` in the JEDI Documentation. ``save posterior mean`` Specifies whether to save the posterior mean. Valid values: ``true`` | ``false`` @@ -333,12 +337,12 @@ Output Increment Observations ^^^^^^^^^^^^^^^ -The ``observations:`` item describes one or more types of observations, each of which is a multi-level YAML/JSON object in and of itself. Each of these observation types is read into JEDI as an ``eckit::Configuration`` object (see `JEDI Documentation `__ for more details). +The ``observations:`` item describes one or more types of observations, each of which is a multi-level YAML/JSON object in and of itself. Each of these observation types is read into JEDI as an ``eckit::Configuration`` object (see :ref:`JEDI Documentation ` for more details). ``obs space:`` ```````````````` -The ``obs space:`` section of the ``yaml`` comes under the ``observations.observers:`` section and describes the configuration of the observation space. An observation space handles observation data for a single observation type. +The ``obs space:`` section of the ``.yaml`` comes under the ``observations.observers:`` section and describes the configuration of the observation space. An observation space handles observation data for a single observation type. ``name`` Specifies the name of observation space. The Land DA System uses ``Simulate`` for the default case. @@ -383,7 +387,7 @@ The ``obs space:`` section of the ``yaml`` comes under the ``observations.observ The ``obs operator:`` section describes the observation operator and its options. 
An observation operator is used for computing H(x). ``name`` - Specifies the name in the ``ObsOperator`` and ``LinearObsOperator`` factory, defined in the C++ code. Valid values include: ``Identity``. See `JEDI Documentation `__ for more options. + Specifies the name in the ``ObsOperator`` and ``LinearObsOperator`` factory, defined in the C++ code. Valid values include: ``Identity``. See :ref:`JEDI Documentation ` for more options. ``obs error:`` `````````````````` @@ -423,10 +427,10 @@ The ``obs error:`` section explains how to calculate the observation error covar ``obs filters:`` `````````````````` -Observation filters are used to define Quality Control (QC) filters. They have access to observation values and metadata, model values at observation locations, simulated observation value, and their own private data. See `Observation Filters `__ in the JEDI Documentation for more detail. The ``obs filters:`` section contains the following fields: +Observation filters are used to define Quality Control (QC) filters. They have access to observation values and metadata, model values at observation locations, simulated observation value, and their own private data. See :ref:`Observation Filters ` in the JEDI Documentation for more detail. The ``obs filters:`` section contains the following fields: ``filter`` - Describes the parameters of a given QC filter. Valid values include: ``Bounds Check`` | ``Background Check`` | ``Domain Check`` | ``RejectList``. See descriptions in the JEDI's `Generic QC Filters `__ Documentation for more. + Describes the parameters of a given QC filter. Valid values include: ``Bounds Check`` | ``Background Check`` | ``Domain Check`` | ``RejectList``. See descriptions in the JEDI's `Generic QC Filters `__ Documentation for more. +--------------------+--------------------------------------------------+ | Filter Name | Description | @@ -438,9 +442,9 @@ Observation filters are used to define Quality Control (QC) filters. 
They have a | | between the observation value and model-simulated| | | value (*y* - *H(x)*) and rejects observations | | | where the absolute difference is larger than | - | | the ``absolute threshold`` or ``threshold`` * | - | | *observation error* or ``threshold`` * | - | | *background error*. | + | | the ``absolute threshold`` or the | + | | :math:`threshold * observation error` or the | + | | :math:`threshold * background error`. | +--------------------+--------------------------------------------------+ | Domain Check | This filter retains all observations selected by | | | the ``where`` statement and rejects all others. | @@ -458,7 +462,7 @@ Observation filters are used to define Quality Control (QC) filters. They have a ``name`` Name of the filter variable. Users may indicate additional filter variables using the ``name`` field on consecutive lines (see code snippet below). Valid values include: ``totalSnowDepth`` - .. code-block:: console + .. code-block:: yaml filter variables: - name: variable_1 @@ -471,16 +475,16 @@ Observation filters are used to define Quality Control (QC) filters. They have a Maximum value for variables in the filter. ``threshold`` - This variable may function differently depending on the filter it is used in. In the `Background Check Filter `__, an observation is rejected when the difference between the observation value (*y*) and model simulated value (*H(x)*) is larger than the ``threshold`` * *observation error*. + This variable may function differently depending on the filter it is used in. In the `Background Check Filter `__, an observation is rejected when the difference between the observation value (*y*) and model simulated value (*H(x)*) is larger than the ``threshold`` * *observation error*. ``action`` - Indicates which action to take once an observation has been flagged by a filter. See `Filter Actions `__ in the JEDI documentation for a full explanation and list of valid values. 
+ Indicates which action to take once an observation has been flagged by a filter. See :ref:`Filter Actions ` in the JEDI documentation for a full explanation and list of valid values. ``name`` The name of the desired action. Valid values include: ``accept`` | ``reject`` ``where`` - By default, filters are applied to all filter variables listed. The ``where`` keyword applies a filter only to observations meeting certain conditions. See the `Where Statement `__ section of the JEDI Documentation for a complete description of valid ``where`` conditions. + By default, filters are applied to all filter variables listed. The ``where`` keyword applies a filter only to observations meeting certain conditions. See the :ref:`Where Statement ` section of the JEDI Documentation for a complete description of valid ``where`` conditions. ``variable`` A list of variables to check using the ``where`` statement. @@ -488,7 +492,7 @@ Observation filters are used to define Quality Control (QC) filters. They have a ``name`` Name of a variable to check using the ``where`` statement. Multiple variable names may be listed under ``variable``. The conditions in the where statement will be applied to all of them. For example: - .. code-block:: console + .. code-block:: yaml filter: Domain Check # land only where: @@ -517,7 +521,7 @@ IODA provides a unified, model-agnostic method of sharing observation data and e The IODA file format represents observational field variables (e.g., temperature, salinity, humidity) and locations in two-dimensional tables, where the variables are represented by columns and the locations by rows. Metadata tables are associated with each axis of these data tables, and the location metadata hold the values describing each location (e.g., latitude, longitude). Actual data values are contained in a third dimension of the IODA data table; for instance: observation values, observation error, quality control flags, and simulated observation (H(x)) values. 
-Since the raw observational data come in various formats, a diverse set of "IODA converters" exists to transform the raw observation data files into IODA format. While many of these Python-based IODA converters have been developed to handle marine-based observations, users can utilize the "IODA converter engine" components to develop and implement their own IODA converters to prepare arbitrary observation types for data assimilation within JEDI. (See https://github.com/NOAA-PSL/land-DA_update/blob/develop/jedi/ioda/imsfv3_scf2ioda_obs40.py for the land DA IMS IODA converter.) +Since the raw observational data come in various formats, a diverse set of "IODA converters" exists to transform the raw observation data files into IODA format. While many of these Python-based IODA converters have been developed to handle marine-based observations, users can utilize the "IODA converter engine" components to develop and implement their own IODA converters to prepare arbitrary observation types for data assimilation within JEDI. (See https://github.com/NOAA-PSL/land-DA_update/blob/develop/jedi/ioda/imsfv3_scf2ioda_obs40.py for the Land DA IMS IODA converter.) Input Files @@ -528,12 +532,14 @@ The Land DA System requires grid description files, observation files, and resta Grid Description Files ========================= -The grid description files appear in :numref:`Section %s ` and are also used as input files to the Vector-to-Tile Converter. See :numref:`Table %s ` for a description of these files. +The grid description files appear in :numref:`Section %s ` and are also used as input files to the Vector-to-Tile Converter and the UFS land component. See :numref:`Table %s ` for a description of these files. + +.. _observation-data: Observation Data ==================== -Observation data from 2016 and 2020 are provided in NetCDF format for the v1.0.0 release. 
Instructions for downloading the data are provided in :numref:`Section %s `, and instructions for accessing the data on :ref:`Level 1 Systems ` are provided in :numref:`Section %s `. Currently, data is taken from the `Global Historical Climatology Network `__ (GHCN), but eventually, data from the U.S. National Ice Center (USNIC) Interactive Multisensor Snow and Ice Mapping System (`IMS `__) will also be available for use. +Observation data from 2000 and 2019 are provided in NetCDF format for the |latestr| release. Instructions for downloading the data are provided in :numref:`Section %s `, and instructions for accessing the data on :ref:`Level 1 Systems ` are provided in :numref:`Section %s `. Currently, data is taken from the `Global Historical Climatology Network `__ (GHCN), but eventually, data from the U.S. National Ice Center (USNIC) Interactive Multisensor Snow and Ice Mapping System (`IMS `__) will also be available for use. Observation Types -------------------- @@ -549,46 +555,48 @@ Snow depth observations are taken from the `Global Historical Climatology Networ where ``${YYYY}`` should be replaced with the year of interest. Note that these yearly tarballs contain all measurement types from the daily GHCN output, and thus, snow depth must be manually extracted from this broader data set. -These raw snow depth observations need to be converted into IODA-formatted netCDF files for ingestion into the JEDI LETKF system. However, this process was preemptively handled outside of the Land DA workflow, and the initial GHCN IODA files for 2016 and 2020 were provided by NOAA PSL (Clara Draper). +These raw snow depth observations need to be converted into IODA-formatted netCDF files for ingestion into the JEDI LETKF system. However, this process was preemptively handled outside of the Land DA workflow, and the 2019 GHCN IODA files were provided by NOAA PSL (Clara Draper). 
-The IODA-formatted GHCN files are structured as follows (using 20160102 as an example): +The IODA-formatted GHCN files are available in the ``inputs/DA/snow_depth/GHCN/data_proc/v3/`` directory and are structured as follows (using 20191221 as an example): .. code-block:: console - netcdf ghcn_snwd_ioda_20160102 { + netcdf ghcn_snwd_ioda_20191221 { dimensions: - nlocs = UNLIMITED ; // (9946 currently) + Location = 9379 ; variables: - int nlocs(nlocs) ; - nlocs:suggested_chunk_dim = 9946LL ; + int Location(Location) ; + Location:suggested_chunk_dim = 9379LL ; // global attributes: string :_ioda_layout = "ObsGroup" ; :_ioda_layout_version = 0 ; string :converter = "ghcn_snod2ioda_newV2.py" ; - string :date_time_string = "2016-01-02T18:00:00Z" ; - :nlocs = 9946 ; + string :date_time_string = "2019-12-21T18:00:00Z" ; + :nlocs = 9379 ; + :history = "Fri Aug 12 20:27:37 2022: ncrename -O -v altitude,height ./data_proc_test/nc4_ghcn_snwd_ioda_20191221.nc ./data_proc_Update/ghcn_snwd_ioda_20191221.nc" ; + :NCO = "netCDF Operators version 4.9.1 (Homepage = http://nco.sf.net, Code = http://github.com/nco/nco)" ; group: MetaData { variables: - string datetime(nlocs) ; - string datetime:_FillValue = "" ; - float height(nlocs) ; + int64 dateTime(Location) ; + dateTime:_FillValue = -2208988800LL ; + string dateTime:units = "seconds since 1970-01-01T00:00:00Z" ; + float height(Location) ; height:_FillValue = 9.96921e+36f ; - string height:units = "m" ; - float latitude(nlocs) ; + float latitude(Location) ; latitude:_FillValue = 9.96921e+36f ; string latitude:units = "degrees_north" ; - float longitude(nlocs) ; + float longitude(Location) ; longitude:_FillValue = 9.96921e+36f ; string longitude:units = "degrees_east" ; - string stationIdentification(nlocs) ; + string stationIdentification(Location) ; string stationIdentification:_FillValue = "" ; } // group MetaData group: ObsError { variables: - float totalSnowDepth(nlocs) ; + float totalSnowDepth(Location) ; 
totalSnowDepth:_FillValue = 9.96921e+36f ; string totalSnowDepth:coordinates = "longitude latitude" ; string totalSnowDepth:units = "mm" ; @@ -596,7 +604,7 @@ The IODA-formatted GHCN files are structured as follows (using 20160102 as an ex group: ObsValue { variables: - float totalSnowDepth(nlocs) ; + float totalSnowDepth(Location) ; totalSnowDepth:_FillValue = 9.96921e+36f ; string totalSnowDepth:coordinates = "longitude latitude" ; string totalSnowDepth:units = "mm" ; @@ -604,7 +612,7 @@ The IODA-formatted GHCN files are structured as follows (using 20160102 as an ex group: PreQC { variables: - int totalSnowDepth(nlocs) ; + int totalSnowDepth(Location) ; totalSnowDepth:_FillValue = -2147483647 ; string totalSnowDepth:coordinates = "longitude latitude" ; } // group PreQC @@ -618,16 +626,16 @@ Observation Location and Processing GHCN ^^^^^^ -GHCN files for 2016 and 2020 are already provided in IODA format for the v1.0.0 release. :numref:`Table %s ` indicates where users can find data on NOAA :term:`RDHPCS` platforms. Tar files containing the 2016 and 2020 data are located in the publicly-available `Land DA Data Bucket `__. Once untarred, the snow depth files are located in ``/inputs/DA/snow_depth/GHCN/data_proc/{YEAR}``. These GHCN IODA files were provided by Clara Draper (NOAA PSL). Each file follows the naming convention of ``ghcn_snwd_ioda_${YYYY}${MM}${DD}.nc``, where ``${YYYY}`` is the four-digit cycle year, ``${MM}`` is the two-digit cycle month, and ``${DD}`` is the two-digit cycle day. +GHCN files for 2000 and 2019 are already provided in IODA format for the |latestr| release. :numref:`Table %s ` indicates where users can find data on NOAA :term:`RDHPCS` platforms. Tar files containing the 2000 and 2019 data are located in the publicly-available `Land DA Data Bucket `__. Once untarred, the snow depth files are located in ``/inputs/DA/snow_depth/GHCN/data_proc/{YEAR}``. The 2019 GHCN IODA files were provided by Clara Draper (NOAA PSL). 
Each file follows the naming convention of ``ghcn_snwd_ioda_${YYYY}${MM}${DD}.nc``, where ``${YYYY}`` is the four-digit cycle year, ``${MM}`` is the two-digit cycle month, and ``${DD}`` is the two-digit cycle day. In each experiment, the ``DA_config`` file sets the name of the experiment configuration file. This configuration file is typically named ``settings_DA_test``. Before assimilation, if "GHCN" was specified as the observation type in the ``DA_config`` file, the ``ghcn_snwd_ioda_${YYYY}${MM}${DD}.nc`` file corresponding to the specified cycle date is soft-linked to the JEDI working directory (``${JEDIWORKDIR}``) with a naming-convention change (i.e., ``GHCN_${YYYY}${MM}${DD}${HH}.nc``). Here, the GHCN IODA file is appended with the cycle hour, ``${HH}`` which is extracted from the ``${STARTDATE}`` variable defined in the relevant ``DA_config`` file. -Prior to ingesting the GHCN IODA files via the LETKF at the DA analysis time, the observations are further quality controlled and checked using ``letkf_land.yaml`` (itself a concatenation of ``GHCN.yaml`` and ``letkfoi_snow.yaml``; see the `GitHub yaml files `__ for more detail). The GHCN-specific observation filters, domain checks, and quality control parameters from ``GHCN.yaml`` ensure that only snow depth observations which meet specific criteria are assimilated (the rest are rejected). The contents of this YAML are listed below: +Prior to ingesting the GHCN IODA files via the LETKF at the DA analysis time, the observations are further quality controlled and checked using ``letkf_land.yaml`` (itself a concatenation of ``GHCN.yaml`` and ``letkfoi_snow.yaml``; see the `GitHub yaml files `__ for more detail). The GHCN-specific observation filters, domain checks, and quality control parameters from ``GHCN.yaml`` ensure that only snow depth observations which meet specific criteria are assimilated (the rest are rejected). The contents of ``GHCN.yaml`` are listed below: -.. code-block:: console +.. 
code-block:: yaml - obs space: - name: Simulate + name: SnowDepthGHCN distribution: name: Halo halo size: 250e3 @@ -659,12 +667,12 @@ Prior to ingesting the GHCN IODA files via the LETKF at the DA analysis time, th - filter: Domain Check # missing station elevation (-999.9) where: - variable: - name: height@MetaData + name: MetaData/height minvalue: -999.0 - filter: Domain Check # land only where: - variable: - name: slmsk@GeoVaLs + name: GeoVaLs/slmsk minvalue: 0.5 maxvalue: 1.5 # GFSv17 only. @@ -676,7 +684,7 @@ Prior to ingesting the GHCN IODA files via the LETKF at the DA analysis time, th - filter: RejectList # no land-ice where: - variable: - name: vtype@GeoVaLs + name: GeoVaLs/vtype minvalue: 14.5 maxvalue: 15.5 - filter: Background Check # gross error check @@ -689,29 +697,20 @@ Prior to ingesting the GHCN IODA files via the LETKF at the DA analysis time, th Viewing NetCDF Files ----------------------- -Users can view file information and notes for NetCDF files using the ``ncdump`` module. First, load a compiler, MPI, and NetCDF modules: - -.. code-block:: console - - # To see available modules: - module avail - # To load modules: - module load intel/2022.2.0 impi/2022.2.0 netcdf/4.7.0 - -Users may need to modify the module load command to reflect modules that are available on their system. - -Then, run ``ncdump -h path/to/file``. For example, on Hera, users can run: +Users can view file information and notes for NetCDF files using the instructions in :numref:`Section %s `. For example, on Orion: .. code-block:: console - ncdump -h /scratch1/NCEPDEV/nems/role.epic/landda/inputs/DA/snow_depth/GHCN/data_proc/2016/ghcn_snwd_ioda_20160102.nc + # Load modules: + module load intel/2022.1.2 impi/2022.1.2 netcdf/4.7.4 + ncdump -h /work/noaa/epic/UFS_Land-DA/inputs/DA/snow_depth/GHCN/data_proc/v3/2019/ghcn_snwd_ioda_20191221.nc -to see the contents of the 2016-01-02 GHCN file. +to see the contents of the 2019-12-21 GHCN file on Orion.
Users may need to modify the module load command and the file path to reflect module versions/file paths that are available on their system. Restart Files ================ -To restart the ``ufs-land-driver`` successfully after land model execution, all parameters, states, and fluxes used for a subsequent time iteration are stored in a restart file. This restart file is named ``ufs_land_restart.{FILEDATE}.nc`` where ``FILEDATE`` is in YYYY-MM-DD_HH-mm-SS format (e.g., ``ufs_land_restart.2016-01-02_18-00-00.nc``). The restart file contains all the model fields and their values at a specific point in time; this information can be used to restart the model immediately to run the next cycle. The Land DA System reads the states from the restart file and replaces them after the DA step with the updated analysis. :numref:`Table %s ` lists the fields in the Land DA restart file. Within the ``ufs-land-driver``, read/write of the restart file is performed in ``ufsLandNoahMPRestartModule.f90``. +To restart the UFS land driver successfully after land model execution, all parameters, states, and fluxes used for a subsequent time iteration are stored in a restart file. This restart file is named ``ufs_land_restart.{FILEDATE}.nc`` where ``FILEDATE`` is in YYYY-MM-DD_HH-mm-SS format (e.g., ``ufs_land_restart.2019-12-21_00-00-00.nc``). The restart file contains all the model fields and their values at a specific point in time; this information can be used to restart the model immediately to run the next cycle. The Land DA System reads the states from the restart file and replaces them after the DA step with the updated analysis. :numref:`Table %s ` lists the fields in the Land DA restart file. Within the UFS land driver (submodule ``ufs-land-driver-emc-dev``), read/write of the restart file is performed in ``ufsLandNoahMPRestartModule.f90``. .. _RestartFiles: @@ -891,8 +890,8 @@ Example of ``${FILEDATE}.coupler.res``: .. 
code-block:: console 2 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4) - 2016 1 2 18 0 0 Model start time: year, month, day, hour, minute, second - 2016 1 2 18 0 0 Current model time: year, month, day, hour, minute, second + 2019 12 22 0 0 0 Model start time: year, month, day, hour, minute, second + 2019 12 22 0 0 0 Current model time: year, month, day, hour, minute, second DA Workflow Overview ************************ @@ -930,53 +929,66 @@ The ``submit_cycle.sh`` script first exports the required paths and loads the re *Flowchart of 'submit_cycle.sh'* -As the script loops through the steps in the process for each cycle, it reads in the DA settings and selects a run type --- either DA or ``openloop`` (which skips DA). Required DA settings include: DA algorithm choice, directory paths for JEDI, Land_DA (where the ``do_landDA.sh`` script is located), JEDI's input observation options, DA window length, options for constructing ``yaml`` files, etc. +As the script loops through the steps in the process for each cycle, it reads in the DA settings and selects a run type --- either DA or ``openloop`` (which skips DA). Required DA settings include: DA algorithm choice, directory paths for JEDI, Land_DA (where the ``do_landDA.sh`` script is located), JEDI's input observation options, DA window length, options for constructing ``.yaml`` files, etc. Next, the system designates work and output directories and copies restart files into the working directory. If the DA option is selected, the script calls the ``vector2tile`` function and tries to convert the format of the Noah-MP model in vector space to the JEDI tile format used in :term:`FV3` cubed-sphere space. After the ``vector2tile`` is done, the script calls the data assimilation job script (``do_landDA.sh``) and runs this script. Then, the ``tile2vector`` function is called and converts the JEDI output tiles back to vector format. 
The converted vector outputs are saved, and the forecast model is run. Then, the script checks the existing model outputs. Finally, if the current date is less than the end date, this same procedure will be processed for the next cycle. .. note:: - The v1.0.0 release of Land DA does not support ensemble runs. Thus, the first ensemble member (``mem000``) is the only ensemble member. + The |latestr| release of Land DA does not support ensemble runs. Thus, the first ensemble member (``mem000``) is the only ensemble member. -Here is an example of configuration settings file, ``settings_cycle``, for the ``submit_cycle`` script: +Here is an example of a configuration settings file, ``settings_DA_cycle_era5``, for the ``submit_cycle.sh`` script: .. code-block:: console - export exp_name=DA_IMS_test - STARTDATE=2016010118 - ENDDATE=2016010318 + # experiment name + export exp_name=DA_ERA5_test + #export BASELINE=hera.internal - export WORKDIR=/*/*/ - export OUTDIR=/*/*/ + STARTDATE=2019122100 + ENDDATE=2019122200 + # Get common variables + source ./release.environment ############################ - # for LETKF, + #forcing options: gswp3, era5 + export atmos_forc=era5 + + # for LETKF, this is size of ensemble. + # for LETKF-OI pseudo ensemble, or non-ensemble runs use 1 export ensemble_size=1 + # length of each forecast export FCSTHR=24 - export atmos_forc=gdas - #FV3 resolution export RES=96 - export TPATH="/*/*/" - export TSTUB="oro_C96.mx100" - - # number of cycles + if [[ $BASELINE =~ 'hera.internal' ]]; then + export TPATH=/scratch2/NCEPDEV/land/data/fix/C96.mx100_frac/ + else + export TPATH="$LANDDA_INPUTS/forcing/${atmos_forc}/orog_files/" + fi + export TSTUB="oro_C96.mx100" # file stub for orography files in $TPATH + # oro_C${RES} for atm only, oro_C${RES}.mx100 for atm/ocean. 
+ + # number of cycles to submit in a single job export cycles_per_job=1 # directory with initial conditions - export ICSDIR=/*/*/ + # can find some here: /scratch2/BMC/gsienkf/Clara.Draper/DA_test_cases/land-offline_workflow/offline_ICS/single + export ICSDIR=$LANDDAROOT/inputs/forcing/${atmos_forc}/orog_files/ # namelist for do_landDA.sh + # set to "openloop" to not call do_landDA.sh export DA_config="settings_DA_test" # if want different DA at different times, list here. - export DA_config00=${DA_config} - export DA_config06=${DA_config} - export DA_config12=${DA_config} - export DA_config18=${DA_config} + export DA_config00=${DA_config} + export DA_config06=${DA_config} + export DA_config12=${DA_config} + export DA_config18=${DA_config} + Parameters for ``submit_cycle.sh`` ------------------------------------- @@ -1042,9 +1054,9 @@ The ``do_landDA.sh`` runs the data assimilation job inside the ``submit_cycle.sh`` script. *Flowchart of 'do_landDA.sh'* -First, to run the DA job, ``do_landDA.sh`` reads in the configuration file and sets up the directories. The date strings are formatted for the current date and previous date. For each tile, restarts are staged to apply the JEDI update. In this stage, all files will get directly updated. Then, the observation files are read and prepared for this job. Once the JEDI type is determined, ``yaml`` files are constructed. Note that if the user specifies a ``yaml`` file, the script uses that one. Otherwise, the script builds the ``yaml`` files. For LETKF-OI, a pseudo-ensemble is created by running the python script (``letkf_create_ens.py``). Once the ensemble is created, the script runs JEDI and applies increment to UFS restarts. +First, to run the DA job, ``do_landDA.sh`` reads in the configuration file and sets up the directories. The date strings are formatted for the current date and previous date. For each tile, restarts are staged to apply the JEDI update. In this stage, all files will get directly updated. 
Then, the observation files are read and prepared for this job. Once the JEDI type is determined, ``.yaml`` files are constructed. Note that if the user specifies a ``.yaml`` file, the script uses that one. Otherwise, the script builds the ``.yaml`` files. For LETKF-OI, a pseudo-ensemble is created by running the python script (``letkf_create_ens.py``). Once the ensemble is created, the script runs JEDI and applies increment to UFS restarts. -Below, users can find an example of a configuration settings file, ``settings_DA``, for the ``do_landDA.sh`` script: +Below, users can find an excerpt of a configuration settings file, ``settings_DA_template``, for the ``do_landDA.sh`` script: .. code-block:: console @@ -1060,10 +1072,12 @@ Below, users can find an example of a configuration settings file, ``settings_DA # YAMLS YAML_DA=construct + YAML_HOFX=construct # JEDI DIRECTORIES - JEDI_EXECDIR= - fv3bundle_vn=20230106_public + #JEDI_EXECDIR= # JEDI FV3 build directory + #IODA_BUILD_DIR= # JEDI IODA-converter source directory + #fv3bundle_vn= # date for JEDI fv3 bundle checkout (used to select correct yaml) ``LANDDADIR`` Specifies the path to the ``do_landDA.sh`` script. @@ -1087,7 +1101,7 @@ Below, users can find an example of a configuration settings file, ``settings_DA Specifies the DA window length. It is generally the same as the ``FCSTLEN``. ``YAML_DA`` - Specifies whether to construct the ``yaml`` name based on requested observation types and their availabilities. Valid values: ``construct`` | *desired YAML name* + Specifies whether to construct the ``.yaml`` name based on requested observation types and their availabilities. 
Valid values: ``construct`` | *desired YAML name* +--------------------+--------------------------------------------------------+ | Value | Description | @@ -1098,7 +1112,7 @@ Below, users can find an example of a configuration settings file, ``settings_DA +--------------------+--------------------------------------------------------+ ``JEDI_EXECDIR`` - Specifies the JEDI FV3 build directory. If using different JEDI version, users will need to edit the ``yaml`` files with the desired directory path. + Specifies the JEDI FV3 build directory. If using different JEDI version, users will need to edit the ``.yaml`` files with the desired directory path. ``fv3bundle_vn`` - Specifies the date for JEDI ``fv3-bundle`` checkout (used to select correct ``yaml``). + Specifies the date for JEDI ``fv3-bundle`` checkout (used to select correct ``.yaml``). diff --git a/docs/source/Model.rst b/docs/source/CustomizingTheWorkflow/Model.rst similarity index 78% rename from docs/source/Model.rst rename to docs/source/CustomizingTheWorkflow/Model.rst index 1ab65aa6..48989911 100644 --- a/docs/source/Model.rst +++ b/docs/source/CustomizingTheWorkflow/Model.rst @@ -5,39 +5,189 @@ Noah-MP Land Surface Model ******************************** This chapter provides practical information on input files and parameters for the Noah-MP Land Surface Model (LSM) and its Vector-to-Tile Converter component. -For background information on the Noah-MP Land Surface Model (LSM), see :numref:`Section %s ` of the Introduction. +For background information on the Noah-MP LSM, see :numref:`Section %s ` of the Introduction. .. _InputFiles: Input Files ************** -The UFS land model requires multiple input files to run: static datasets +The UFS land model requires multiple input files to run, including static datasets (fix files containing climatological information, terrain, and land use -data), initial conditions and forcing files, and model configuration -files (such as namelists). 
Users may reference the `Community Noah-MP User's +data), initial conditions files, and forcing files. Users may reference the `Community Noah-MP User's Guide `__ for a detailed technical description of certain elements of the Noah-MP model. -There are several important files used to specify model parameters: -the static file (``ufs-land_C96_static_fields.nc``), -the initial conditions file (``ufs-land_C96_init_*.nc``), -and the model configuration file (``ufs-land.namelist.noahmp``). -These files and their parameters are described in the following subsections. -They are publicly available via the `Land DA Data Bucket `__. -Users can download the data and untar the file via the command line, replacing -``{YEAR}`` with the year for the desired data. Release data is currently -available for 2016 and 2020: +In both the land component and land driver implementations of Noah-MP, static file(s) and initial conditions file(s) specify model parameters. +These files are publicly available via the `Land DA data bucket `__. +Users can download the data and untar the file via the command line: .. _TarFile: .. code-block:: console - wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/landda-input-data-{YEAR}.tar.gz - tar xvfz landda-input-data-{YEAR}.tar.gz + wget https://noaa-ufs-land-da-pds.s3.amazonaws.com/current_land_da_release_data/v1.2.0/Landdav1.2.0_input_data.tar.gz + tar xvfz Landdav1.2.0_input_data.tar.gz + +These files and their parameters are described in the following subsections. + +.. note:: + + * Users who wish to use the UFS land component with GSWP3 data can proceed to the :numref:`Section %s `. + * Users who wish to run the land driver implementation of Land DA with ERA5 data should proceed to :numref:`Section %s `. + +.. _view-netcdf-files: + +Viewing netCDF Files +====================== + +Users can view file information and notes for NetCDF files using the ``ncdump`` module. 
First, load a compiler, MPI, and NetCDF modules: + +.. code-block:: console + + module load intel/2022.1.2 impi/2022.1.2 netcdf/4.7.4 + +To view information on the variables contained in a :term:`netCDF` file, users can run ``ncdump -h filename.nc``. Users will need to replace ``filename.nc`` with the actual name of the file they want to view. For example: + +.. code-block:: console + + ncdump -h /path/to/ufs-land_C96_init_fields.tile1.nc + +where ``/path/to/`` is replaced by the actual path to the file. Users may also need to modify the module load command to reflect modules that are available on their system. + +Alternatively, users on Level 1 platforms can load the Land DA environment, which contains the NetCDF module, from ``land-DA_workflow`` as described in :numref:`Section %s `. + +.. _datm-lnd-input-files: + +Input Files for the ``DATM`` + ``LND`` Configuration with GSWP3 data +====================================================================== + +With the integration of the UFS Noah-MP land component into the Land DA System in the v1.2.0 release, model forcing options have been enhanced so that users can run the UFS land component (:term:`LND`) with the data atmosphere component (:term:`DATM`). Updates provide a new analysis option on the cubed-sphere native grid using :term:`GSWP3` forcing data to run a single-day experiment for 2000-01-03. An artificial GHCN snow depth observation is provided for data assimilation (see :numref:`Section %s ` for more on GHCN files). The GHCN observations will be extended in the near future. A new configuration setting file is also provided (``settings_DA_cycle_gswp3``). + +On Level 1 platforms, the requisite data is pre-staged at the locations listed in :numref:`Section %s `. The data are also publicly available via the `Land DA Data Bucket `__. + +.. attention:: + + The DATM + LND option is only supported on Level 1 systems (i.e., Hera and Orion). 
It is not tested or supported using a container except on Hera and Orion. + +Forcing Files +--------------- + +:term:`Forcing files` for the land component configuration come from the Global Soil Wetness Project Phase 3 (`GSWP3 `__) dataset. They are located in the ``inputs/UFS_WM/DATM_GSWP3_input_data`` directory (downloaded :ref:`above `). + +.. code-block:: console + + clmforc.GSWP3.c2011.0.5x0.5.Prec.1999-12.nc + clmforc.GSWP3.c2011.0.5x0.5.Prec.2000-01.nc + clmforc.GSWP3.c2011.0.5x0.5.Solr.1999-12.nc + clmforc.GSWP3.c2011.0.5x0.5.Solr.2000-01.nc + clmforc.GSWP3.c2011.0.5x0.5.TPQWL.1999-12.nc + clmforc.GSWP3.c2011.0.5x0.5.TPQWL.2000-01.nc + clmforc.GSWP3.c2011.0.5x0.5.TPQWL.SCRIP.210520_ESMFmesh.nc + fv1.9x2.5_141008_ESMFmesh.nc + topodata_0.9x1.25_USGS_070110_stream_c151201.nc + topodata_0.9x1.SCRIP.210520_ESMFmesh.nc + +These files provide atmospheric forcing data related to precipitation, solar radiation, longwave radiation, temperature, pressure, winds, humidity, topography, and mesh data. + +Noah-MP Initial Conditions +---------------------------- + +The offline Land DA System currently only supports snow DA. +The initial conditions files include the initial state variables that are required for the UFS land snow DA to begin a cycling run. The data must be provided in :term:`netCDF` format. + +By default, on Level 1 systems and in the Land DA data bucket, the initial conditions files are located at ``inputs/UFS_WM/NOAHMP_IC`` (downloaded :ref:`above `). Each file corresponds to one of the six tiles of the `global FV3 grid `__. + +.. code-block:: console + + ufs-land_C96_init_fields.tile*.nc + +The files contain the following data: + +.. 
list-table:: *Variables specified in the initial conditions file ``ufs-land_C96_init_fields.tile*.nc``* + :header-rows: 1 + + * - Variables + - Long Name + - Units + * - time(time) + - "time" + - "seconds since 1970-01-01 00:00:00" + * - geolat(lat, lon) + - "latitude" + - "degrees_north" + * - geolon(lat, lon) + - "longitude" + - "degrees_east" + * - snow_water_equivalent(time, lat, lon) + - "snow water equivalent" + - "mm" + * - snow_depth(time, lat, lon) + - "snow depth" + - "m" + * - canopy_water(time, lat, lon) + - "canopy surface water" + - "mm" + * - skin_temperature(time, lat, lon) + - "surface skin temperature" + - "K" + * - soil_temperature(time, soil_levels, lat, lon) + - "soil temperature" + - "K" + * - soil_moisture(time, soil_levels, lat, lon) + - "volumetric soil moisture" + - "m3/m3" + * - soil_liquid(time, soil_levels, lat, lon) + - "volumetric soil liquid" + - "m3/m3" + + +FV3_fix_tiled Files +--------------------- + +The UFS land component also requires a series of tiled static (fix) files that will be used by the component model. These files contain information on maximum snow albedo, slope type, soil color and type, substrate temperature, vegetation greenness and type, and orography (grid and land mask information). These files are located in the ``inputs/UFS_WM/FV3_fix_tiled/C96/`` directory (downloaded :ref:`above `). + +.. code-block:: console + + C96.maximum_snow_albedo.tile*.nc + C96.slope_type.tile*.nc + C96.soil_type.tile*.nc + C96.soil_color.tile*.nc + C96.substrate_temperature.tile*.nc + C96.vegetation_greenness.tile*.nc + C96.vegetation_type.tile*.nc + oro_C96.mx100.tile*.nc + +FV3_input_data +---------------- + +The ``FV3_input_data`` directory contains grid information used by the model. This grid information is located in ``inputs/UFS_WM/FV3_input_data/INPUT`` (downloaded :ref:`above `). + +.. 
code-block:: console + + C96_grid.tile*.nc + grid_spec.nc # aka C96.mosaic.nc + +The ``C96_grid.tile*.nc`` files contain grid information for tiles 1-6 at C96 grid resolution. The ``grid_spec.nc`` file contains information on the mosaic grid. + +.. note:: + + ``grid_spec.nc`` and ``C96.mosaic.nc`` are the same file under different names and may be used interchangeably. + +.. _land-driver-input-files: + +Input Files for the Land Driver Configuration with ERA5 Data +============================================================== + +There are several important files used to specify model parameters in the land driver-based offline Land DA System: +the static file (``ufs-land_C96_static_fields.nc``), +the initial conditions file (``ufs-land_C96_init_*.nc``), +and the model configuration file (``ufs-land.namelist.noahmp``). +These files and their parameters are described in the following subsections. +They are publicly available via the `Land DA Data Bucket `__. Static File (``ufs-land_C96_static_fields.nc``) -================================================= +------------------------------------------------- The static file includes specific information on location, time, soil layers, and fixed (invariant) experiment parameters that are required for Noah-MP to run. The data must be provided in :term:`netCDF` format. @@ -45,11 +195,9 @@ The static file is available in the ``inputs`` data directory (downloaded :ref:` .. code-block:: - inputs/forcing//static/ufs-land_C96_static_fields.nc - -where ```` is either ``gdas`` or ``era5``. + inputs/forcing/era5/static/ufs-land_C96_static_fields.nc -.. table:: Configuration variables specified in the static file (ufs-land_C96_static_fields.nc) +.. table:: *Configuration variables specified in the static file* (ufs-land_C96_static_fields.nc) +---------------------------+------------------------------------------+ | Configuration Variables | Description | @@ -105,7 +253,7 @@ where ```` is either ``gdas`` or ``era5``. 
+---------------------------+------------------------------------------+ Initial Conditions File (``ufs-land_C96_init_*.nc``) -================================================================= +------------------------------------------------------ The offline Land DA System currently only supports snow DA. The initial conditions file includes the initial state variables that are required for the UFS land snow DA to begin a cycling run. The data must be provided in :term:`netCDF` format. @@ -114,8 +262,7 @@ The initial conditions file is available in the ``inputs`` data directory (downl .. code-block:: - inputs/forcing/GDAS/init/ufs-land_C96_init_fields_1hr.nc - inputs/forcing/ERA5/init/ufs-land_C96_init_2010-12-31_23-00-00.nc + inputs/forcing/era5/init/ufs-land_C96_init_2010-12-31_23-00-00.nc .. table:: Configuration variables specified in the initial forcing file (ufs-land_C96_init_fields_1hr.nc) @@ -150,7 +297,7 @@ The initial conditions file is available in the ``inputs`` data directory (downl +-----------------------------+----------------------------------------+ Model Configuration File (``ufs-land.namelist.noahmp``) -========================================================== +---------------------------------------------------------- The UFS land model uses a series of template files combined with user-selected settings to create required namelists and parameter @@ -163,7 +310,7 @@ from the ``template.ufs-noahMP.namelist.*`` file. Any default values indicated are the defaults set in the ``template.ufs-noahMP.namelist.*`` files. Run Setup Parameters ----------------------- +^^^^^^^^^^^^^^^^^^^^^^ ``static_file`` Specifies the path to the UFS land static file. @@ -240,7 +387,7 @@ Run Setup Parameters Specifies the number of timesteps to run. Land Model Options ---------------------- +^^^^^^^^^^^^^^^^^^^^^ ``land_model`` Specifies which land surface model to use. 
Valid values: ``1`` | ``2`` @@ -254,7 +401,7 @@ Land Model Options +--------+-------------+ Structure-Related Parameters -------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``num_soil_levels`` Specifies the number of soil levels. @@ -263,7 +410,7 @@ Structure-Related Parameters Specifies the forcing height in meters. Soil Setup Parameters ------------------------ +^^^^^^^^^^^^^^^^^^^^^^^ ``soil_level_thickness`` Specifies the thickness (in meters) of each of the soil layers (top layer to bottom layer). @@ -272,7 +419,7 @@ Soil Setup Parameters Specifies the soil level centroids from the surface (in meters). Noah-MP Options ------------------------- +^^^^^^^^^^^^^^^^^^ ``dynamic_vegetation_option``: (Default: ``4``) Specifies the dynamic vegetation model option. Valid values: ``1`` | ``2`` | ``3`` | ``4`` | ``5`` | ``6`` | ``7`` | ``8`` | ``9`` | ``10`` @@ -350,7 +497,7 @@ Noah-MP Options +--------+-----------------------------------------------------------------------+ | 4 | BATS surface and subsurface runoff (free drainage) | +--------+-----------------------------------------------------------------------+ - | 5 | Miguez-Macho&Fan groundwater scheme (:cite:t:`Miguez-MachoEtAl2007`; | + | 5 | Miguez-Macho & Fan groundwater scheme (:cite:t:`Miguez-MachoEtAl2007`;| | | :cite:t:`FanEtAl2007`) | +--------+-----------------------------------------------------------------------+ @@ -513,7 +660,7 @@ Noah-MP Options +--------+------------------------------------------------+ Forcing Parameters ---------------------- +^^^^^^^^^^^^^^^^^^^^^ ``forcing_timestep_seconds``: (Default: ``3600``) Specifies the forcing timestep in seconds. @@ -530,11 +677,8 @@ Forcing Parameters +----------------+-----------------------------------------------------+ | gdas | hourly forcing stored in daily files | +----------------+-----------------------------------------------------+ - - .. note:: - - There is no separate ``era5`` format. 
It is the same as the ``gdas`` format, - so users should select ``gdas`` for this parameter when using ``era5`` forcing. + | era5 | hourly forcing stored in daily files | + +----------------+-----------------------------------------------------+ ``forcing_filename`` Specifies the forcing file name prefix. A date will be appended to this prefix. For example: ``C96_ERA5_forcing_2020-10-01.nc``. The prefix merely indicates which grid (``C96``) and source (i.e., GDAS, GEFS) will be used. @@ -589,7 +733,7 @@ Forcing Parameters Specifies the variable name of forcing longwave radiation. Example Namelist Entry --------------------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^ The ``ufs-land.namelist.noahmp`` file should be similar to the following example, which comes from the ``template.ufs-noahMP.namelist.gdas`` file. @@ -597,12 +741,13 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example &run_setup - static_file = "/LANDDA_INPUTS/forcing/gdas/static/ufs-land_C96_static_fields.nc" - init_file = "/LANDDA_INPUTS/forcing/gdas/init/ufs-land_C96_init_fields_1hr.nc" - forcing_dir = "/LANDDA_INPUTS/forcing/gdas/gdas/forcing" - + static_file = "/LANDDA_INPUTS/forcing/era5/static/ufs-land_C96_static_fields.nc" + init_file = "/LANDDA_INPUTS/forcing/era5/init/ufs-land_C96_init_2010-12-31_23-00-00.nc" + forcing_dir = "/LANDDA_INPUTS/forcing/era5/datm/C96/" + separate_output = .false. - output_dir = "./" + output_dir = "./noahmp_output/" + output_frequency_s = 0 restart_frequency_s = XXFREQ restart_simulation = .true. @@ -611,12 +756,12 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example timestep_seconds = 3600 - ! simulation_start is required - ! either set simulation_end or run_* or run_timesteps, priority - ! 1. simulation_end 2. run_[days/hours/minutes/seconds] 3. run_timesteps + ! simulation_start is required + ! either set simulation_end or run_* or run_timesteps, priority + ! 1. simulation_end 2. 
run_[days/hours/minutes/seconds] 3. run_timesteps - simulation_start = "2000-01-01 00:00:00" ! start date [yyyy-mm-dd hh:mm:ss] - ! simulation_end = "1999-01-01 06:00:00" ! end date [yyyy-mm-dd hh:mm:ss] + simulation_start = "2011-01-01 00:00:00" ! start date [yyyy-mm-dd hh:mm:ss] + ! simulation_end = "1999-01-01 06:00:00" ! end date [yyyy-mm-dd hh:mm:ss] run_days = XXRDD ! number of days to run run_hours = XXRHH ! number of hours to run @@ -626,8 +771,8 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example run_timesteps = 0 ! number of timesteps to run location_start = 1 - location_end = 18360 - + location_end = 18322 + / &land_model_option @@ -636,7 +781,7 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example &structure num_soil_levels = 4 ! number of soil levels - forcing_height = 6 ! forcing height [m] + forcing_height = 10 ! forcing height [m] / &soil_setup @@ -664,11 +809,13 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example &forcing forcing_timestep_seconds = 3600 - forcing_type = "gdas" - forcing_filename = "C96_GDAS_forcing_" + forcing_regrid = "none" + forcing_regrid_weights_filename= "" + forcing_type = "dd_1h" + forcing_filename = "C96_ERA5_forcing_" forcing_interp_solar = "linear" ! gswp3_zenith or linear forcing_time_solar = "instantaneous" ! gswp3_average or instantaneous - forcing_name_precipitation = "precipitation_conserve" + forcing_name_precipitation = "precipitation_bilinear" forcing_name_temperature = "temperature" forcing_name_specific_humidity = "specific_humidity" forcing_name_wind_speed = "wind_speed" @@ -677,6 +824,16 @@ The ``ufs-land.namelist.noahmp`` file should be similar to the following example forcing_name_lw_radiation = "longwave_radiation" / + &io + output_names = "snow_water_equiv", + "snow_depth", + "temperature_snow" + daily_mean_names = "" + monthly_mean_names = "" + solar_noon_names = "" + restart_names = "" + / + .. 
_VectorTileConverter: @@ -698,19 +855,21 @@ The input files containing grid information are listed in :numref:`Table %s `__ (WM). Its data assimilation framework uses -the Joint Effort for Data assimilation Integration (:term:`JEDI`) software. The offline UFS Land Data Assimilation (Land DA) System currently only works with snow data. -Thus, this User's Guide focuses primarily on the snow DA process. - -This User's Guide is organized as follows: - - * This chapter (Introduction) provides background information on the Unified Forecast System (:term:`UFS`) and the NoahMP model. - * :numref:`Chapter %s ` (Technical Overview) outlines prerequisites, user support levels, and directory structure. - * :numref:`Chapter %s ` (Land DA Workflow [Hera & Orion]) explains how to build and run the Land DA System on :ref:`Level 1 ` systems (currently Hera and Orion). - * :numref:`Chapter %s ` (Land DA Workflow [in a Container]) explains how to build and run the containerized Land DA System on non-Level 1 systems. - * :numref:`Chapter %s ` (Model) provides information on input data and configuration parameters in the Noah-MP LSM and its Vector-to-Tile Converter. - * :numref:`Chapter %s ` (DA Framework) provides information on the DA system, required data, and configuration parameters. - * :numref:`Chapter %s ` (Glossary) lists important terms. - -Users and developers may post questions and exchange information on the Land DA System's `GitHub Discussions `__ forum if their concerns are not addressed in this User's Guide. - -The Land DA System citation is as follows and should be used when presenting results based on research conducted with the Land DA System: - -UFS Development Team. (2023, March 6). Unified Forecast System (UFS) Land Data Assimilation (DA) System (Version v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.7675721 - - -.. 
_Background: - -Background Information -************************ - -Unified Forecast System (UFS) -=============================== - -The UFS is a community-based, coupled, comprehensive Earth modeling system. It includes `multiple applications `__ that support different forecast durations and spatial domains. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from many different modeling systems. For example, the UFS-based Global Forecast System -(`GFS `__) -and the Global Ensemble Forecast System -(`GEFS `__) are currently in operational use. -The UFS is designed to enable research, development, and contribution -opportunities within the broader :term:`Weather Enterprise` (including -government, industry, and academia). For more information about the UFS, visit the `UFS Portal `__. - - -.. _NoahMP: - -Noah-MP -========== - -The offline Noah-MP LSM is a stand-alone, uncoupled model used to execute land surface simulations. In this traditional uncoupled mode, near-surface atmospheric :term:`forcing data` are required as input forcing. This LSM simulates soil moisture (both liquid and frozen), soil temperature, skin temperature, snow depth, snow water equivalent (SWE), snow density, canopy water content, and the energy flux and water flux terms of the surface energy balance and surface water balance. - -Noah-MP uses a big-leaf approach with a separated vegetation canopy accounting -for vegetation effects on surface energy and water balances, a modified two-stream -approximation scheme to include the effects of vegetation canopy gaps that vary -with solar zenith angle and the canopy 3-D structure on radiation transfer, -a 3-layer physically-based snow model, a more permeable frozen soil by separating -a grid cell into a permeable fraction and impermeable fraction, a simple -groundwater model with a TOPMODEL-based runoff scheme, and a short-term leaf -phenology model. 
Noah-MP LSM enables a modular framework for diagnosing differences -in process representation, facilitating ensemble forecasts and uncertainty -quantification, and choosing process presentations appropriate for the application. -Noah-MP developers designed multiple parameterization options for leaf dynamics, -radiation transfer, stomatal resistance, soil moisture stress factor for stomatal -resistance, aerodynamic resistance, runoff, snowfall, snow surface albedo, -supercooled liquid water in frozen soil, and frozen soil permeability. - -The Noah-MP LSM has evolved through community efforts to pursue and refine a modern-era LSM suitable for use in the National Centers for Environmental Prediction (NCEP) operational weather and climate prediction models. This collaborative effort continues with participation from entities such as NCAR, NCEP, NASA, and university groups. - -Noah-MP has been implemented in the UFS via the :term:`CCPP` physics package and -is currently being tested for operational use in GFSv17 and RRFS v2. Noah-MP has -also been used operationally in the NOAA National Water Model (NWM) since 2016. Details about the model's physical parameterizations can be found in :cite:t:`NiuEtAl2011` (2011). - -Disclaimer -************* - -The United States Department of Commerce (DOC) GitHub project code is -provided on an “as is” basis and the user assumes responsibility for its -use. DOC has relinquished control of the information and no longer has a -responsibility to protect the integrity, confidentiality, or -availability of the information. Any claims against the Department of -Commerce stemming from the use of its GitHub project will be governed by -all applicable Federal laws. Any reference to specific commercial -products, processes, or services by service mark, trademark, -manufacturer, or otherwise, does not constitute or imply their -endorsement, recommendation, or favoring by the Department of Commerce. 
-The Department of Commerce seal and logo, or the seal and logo of a DOC -bureau, shall not be used in any manner to imply endorsement of any -commercial product or activity by DOC or the United States Government. - -References -************* - -.. bibliography:: references.bib \ No newline at end of file diff --git a/docs/source/Glossary.rst b/docs/source/Reference/Glossary.rst similarity index 80% rename from docs/source/Glossary.rst rename to docs/source/Reference/Glossary.rst index d3886647..97009da0 100644 --- a/docs/source/Glossary.rst +++ b/docs/source/Reference/Glossary.rst @@ -18,11 +18,14 @@ Glossary data assimilation One of the major sources of error in weather and climate forecasts is uncertainty related to the initial conditions that are used to generate future predictions. Even the most precise instruments have a small range of unavoidable measurement error, which means that tiny measurement errors (e.g., related to atmospheric conditions and instrument location) can compound over time. These small differences result in very similar forecasts in the short term (i.e., minutes, hours), but they cause widely divergent forecasts in the long term. Errors in weather and climate forecasts can also arise because models are imperfect representations of reality. Data assimilation systems seek to mitigate these problems by combining the most timely observational data with a "first guess" of the atmospheric state (usually a previous forecast) and other sources of data to provide a "best guess" analysis of the atmospheric state to start a weather or climate simulation. When combined with an "ensemble" of model runs (many forecasts with slightly different conditions), data assimilation helps predict a range of possible atmospheric states, giving an overall measure of uncertainty in a given forecast. 
+ ERA5 + The ECMWF Reanalysis v5 (`ERA5 `__) dataset "is the fifth generation ECMWF atmospheric reanalysis of the global climate covering the period from January 1940 to present." It "provides hourly estimates of a large number of atmospheric, land and oceanic climate variables." + ESMF `Earth System Modeling Framework `__. The ESMF defines itself as "a suite of software tools for developing high-performance, multi-component Earth science modeling applications." It is a community-developed software infrastructure for building and coupling models. FMS - The Flexible Modeling System (FMS) is a software framework for supporting the efficient + The Flexible Modeling System (`FMS `__) is a software framework for supporting the efficient development, construction, execution, and scientific interpretation of atmospheric, oceanic, and climate system models. @@ -32,6 +35,9 @@ Glossary FV3 The Finite-Volume Cubed-Sphere dynamical core (dycore). Developed at NOAA’s `Geophysical Fluid Dynamics Laboratory `__ (GFDL), it is a scalable and flexible dycore capable of both hydrostatic and non-hydrostatic atmospheric simulations. It is the dycore used in the UFS Weather Model. + GSWP3 + The Global Soil Wetness Project Phase 3 (`GSWP3 `__) dataset is a century-long comprehensive set of data documenting several variables for hydro-energy-eco systems. + JEDI The Joint Effort for Data assimilation Integration (`JEDI `__) is a unified and versatile data assimilation (DA) system for Earth System Prediction. It aims to enable efficient research and accelerated transition from research to operations by providing a framework that takes into account all components of the Earth system in a consistent manner. The JEDI software package can run on a variety of platforms and for a variety of purposes, and it is designed to readily accommodate new atmospheric and oceanic models and new observation systems. The `JEDI User's Guide `__ contains extensive information on the software. 
@@ -40,6 +46,9 @@ Glossary HPC High-Performance Computing. + LETKF-OI + Local Ensemble Transform Kalman Filter-Optimal Interpolation (see :cite:t:`HuntEtAl2007`, 2007). + MPI MPI stands for Message Passing Interface. An MPI is a standardized communication system used in parallel programming. It establishes portable and efficient syntax for the exchange of messages and data between multiple processors that are used by a single computer program. An MPI is required for high-performance computing (HPC) systems. @@ -68,7 +77,7 @@ Glossary `Spack `__ is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms and environments. It was designed for large supercomputing centers, where many users and application teams share common installations of software on clusters with exotic architectures. spack-stack - The `spack-stack `__ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications. The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. 
+ The `spack-stack `__ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications. The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. UFS The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system consisting of several applications (apps). These apps span regional to global domains and sub-hourly to seasonal time scales. The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufscommunity.org/. diff --git a/docs/source/Reference/index.rst b/docs/source/Reference/index.rst new file mode 100644 index 00000000..f2c320a6 --- /dev/null +++ b/docs/source/Reference/index.rst @@ -0,0 +1,9 @@ +.. _reference-index: + +Reference +=========== + +.. 
toctree:: + :maxdepth: 3 + + Glossary diff --git a/docs/source/conf.py b/docs/source/conf.py index ad9ba194..2a23b7c9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,16 +11,17 @@ author = ' ' # The short X.Y version -version = 'v1.0' +version = 'v1.2' # The full version, including alpha/beta/rc tags -release = 'v1.0.0' +release = 'v1.2.0' numfig = True # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -extensions = ['sphinx_rtd_theme', +extensions = [ + 'sphinx_rtd_theme', 'sphinx.ext.intersphinx', 'sphinxcontrib.bibtex', ] @@ -42,6 +43,12 @@ # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' +# Documentation-wide substitutions + +rst_prolog = """ +.. |latestr| replace:: v1.2.0 +.. |tag| replace:: ``ufs-land-da-v1.2.0`` +""" # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output @@ -56,8 +63,10 @@ # further. For a list of options available for each theme, see the # documentation. # -# html_theme_options = {} -html_theme_options = {"body_max_width": "none"} +html_theme_options = { + "body_max_width": "none", + 'navigation_depth': 6, + } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -68,3 +77,11 @@ def setup(app): app.add_css_file('custom.css') # may also be an URL app.add_css_file('theme_overrides.css') # may also be a URL + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'jedi': ('https://jointcenterforsatellitedataassimilation-jedi-docs.readthedocs-hosted.com/en/1.7.0', None), + 'spack-stack': ('https://spack-stack.readthedocs.io/en/1.3.0/', None), + 'ufs-wm': ('https://ufs-weather-model.readthedocs.io/en/latest/', None), + 'gswp3': ('https://hydro.iis.u-tokyo.ac.jp/GSWP3/', None), +} diff --git a/docs/source/index.rst b/docs/source/index.rst index 30569a65..69855368 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,27 +1,16 @@ .. UFS Offline LandDA documentation master file, created by sphinx-quickstart on Fri Jan 20 10:35:26 2023. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. -Welcome to UFS Offline LandDA's documentation! -============================================== +UFS Offline Land DA User's Guide |release| +============================================ .. toctree:: :numbered: :maxdepth: 3 :caption: Contents: - Introduction - TechnicalOverview - BuildRunLandDA - Container - Model - DASystem - Glossary + BackgroundInfo/index + BuildingRunningTesting/index + CustomizingTheWorkflow/index + Reference/index -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/settings_DA_cycle_era5 b/settings_DA_cycle_era5 index a5c68dc0..34d04991 100644 --- a/settings_DA_cycle_era5 +++ b/settings_DA_cycle_era5 @@ -4,7 +4,6 @@ export exp_name=DA_ERA5_test #export BASELINE=hera.internal -#ERA5 forcing is available for 2020-2021 STARTDATE=2019122100 ENDDATE=2019122200 diff --git a/settings_DA_cycle_era5_restart b/settings_DA_cycle_era5_restart deleted file mode 100644 index 8da63312..00000000 --- a/settings_DA_cycle_era5_restart +++ /dev/null @@ -1,45 +0,0 @@ -# Settings file for submit_cycle, for running the DA_IMS_test - -# experiment name -export exp_name=DA_GHCN_test - -#ERA5 forcing is available for 2020-2021 -STARTDATE=2020100318 -ENDDATE=2020100418 - -# Get commmon 
variables -source ./release.environment -############################ - -#forcing options: gdas, era5 -export atmos_forc=era5 - -# for LETKF, this is size of ensemble. -# for LETKF-OI pseudo ensemble, or non-ensemble runs use 1 -export ensemble_size=1 - -# length of each forecast -export FCSTHR=24 - -#FV3 resolution -export RES=96 -export TPATH="$LANDDA_INPUTS/forcing/${atmos_forc}/orog_files/" -export TSTUB="oro_C96.mx100" # file stub for orography files in $TPATH - # oro_C${RES} for atm only, oro_C${RES}.mx100 for atm/ocean. - -# number of cycles to submit in a single job -export cycles_per_job=2 - -# directory with initial conditions -# can find some here:/scratch2/BMC/gsienkf/Clara.Draper/DA_test_cases/land-offline_workflow/offline_ICS/single -export ICSDIR=$LANDDAROOT/inputs/forcing/${atmos_forc}/orog_files/ - -# namelist for do_landDA.sh -# set to "openloop" to not call do_landDA.sh -export DA_config="settings_DA_test" - -# if want different DA at different times, list here. -export DA_config00=${DA_config} -export DA_config06=${DA_config} -export DA_config12=${DA_config} -export DA_config18=${DA_config} diff --git a/vector2tile b/vector2tile new file mode 160000 index 00000000..2fb5c1b4 --- /dev/null +++ b/vector2tile @@ -0,0 +1 @@ +Subproject commit 2fb5c1b493b3c03809e081f6fe8e8e76a40a100d diff --git a/vector2tile/.gitignore b/vector2tile/.gitignore deleted file mode 100644 index 18de9433..00000000 --- a/vector2tile/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -vector2tile_converter.exe -vector2tile_converter.o -*.mod -*.o diff --git a/vector2tile/CMakeLists.txt b/vector2tile/CMakeLists.txt deleted file mode 100644 index 81ac1f91..00000000 --- a/vector2tile/CMakeLists.txt +++ /dev/null @@ -1,76 +0,0 @@ -# (C) Copyright 2022 . -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- - -cmake_minimum_required( VERSION 3.12) -project( vector2tile VERSION 2022.10 LANGUAGES Fortran ) - -# ecbuild integration -find_package(ecbuild 3.3.2 REQUIRED) -include( ecbuild_system NO_POLICY_SCOPE ) -ecbuild_declare_project() - -list( APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) -include( vector2tile_compiler_flags ) - -################################################################################ -# Dependencies -################################################################################ - -find_package(OpenMP COMPONENTS C Fortran) -find_package(MPI REQUIRED COMPONENTS C Fortran) -find_package(NetCDF REQUIRED COMPONENTS Fortran ) - -################################################################################ -# Sources -################################################################################ - -set( VECTOR2TILE_LINKER_LANGUAGE Fortran ) - -list ( APPEND src_files -./namelist_mod.f90 -./vector2tile_perturbation_mod.f90 -./vector2tile_restart_mod.f90 -) - -ecbuild_add_library( TARGET vector2tile - SOURCES ${src_files} - INSTALL_HEADERS LISTED - LINKER_LANGUAGE ${VECTOR2TILE_LINKER_LANGUAGE} - ) - - -target_link_libraries(vector2tile PUBLIC NetCDF::NetCDF_Fortran) -target_link_libraries(vector2tile PUBLIC MPI::MPI_Fortran) -target_link_libraries(vector2tile PUBLIC OpenMP::OpenMP_C OpenMP::OpenMP_Fortran) - -# Fortran module output directory for build and install interfaces -set(MODULE_DIR module/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}/${CMAKE_Fortran_COMPILER_VERSION}) -set_target_properties(${PROJECT_NAME} PROPERTIES Fortran_MODULE_DIRECTORY ${CMAKE_BINARY_DIR}/${MODULE_DIR}) -install(DIRECTORY ${CMAKE_BINARY_DIR}/${MODULE_DIR}/ DESTINATION ${MODULE_DIR}) -target_include_directories(${PROJECT_NAME} INTERFACE - $ - $) - -set( VECTOR2TILE_INCLUDE_DIRS ${CMAKE_Fortran_MODULE_DIRECTORY} ) -set( VECTOR2TILE_LIBRARIES vector2tile ) - -# vector2tile_converter.exe executable 
-#------------------------------------------------------------------------------- -set ( exe_files vector2tile_driver.f90 ) - -ecbuild_add_executable( TARGET vector2tile_converter.exe - SOURCES ${exe_files} - LIBS vector2tile - LINKER_LANGUAGE ${VECTOR2TILE_LINKER_LANGUAGE} - ) - - -################################################################################ -# Finalise configuration -################################################################################ - -ecbuild_install_project( NAME vector2tile ) -ecbuild_print_summary() diff --git a/vector2tile/Makefile b/vector2tile/Makefile deleted file mode 100644 index 45308c7e..00000000 --- a/vector2tile/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -# Makefile -# -.SUFFIXES: -.SUFFIXES: .o .f90 - -include ./user_build_config - -OBJS = namelist_mod.o vector2tile_restart_mod.o vector2tile_perturbation_mod.o vector2tile_driver.o - -all: vector2tile_converter.exe - -.f90.o: - $(COMPILERF90) -c $(F90FLAGS) $(FREESOURCE) $(NETCDFMOD) $(*).f90 - -vector2tile_converter.exe: $(OBJS) - $(COMPILERF90) -o $(@) $(F90FLAGS) $(FREESOURCE) $(NETCDFMOD) $(OBJS) $(NETCDFLIB) - -clean: - rm -f *.o *.mod *.exe - - -# -# Dependencies: -# diff --git a/vector2tile/README.md b/vector2tile/README.md deleted file mode 100644 index 69c46d60..00000000 --- a/vector2tile/README.md +++ /dev/null @@ -1,30 +0,0 @@ -Code to map between vector format used by the Noah-MP offline driver, and the tile format used by the UFS atmospheric model. Currently used to prepare input tile files for JEDI. These files include only those fields required by JEDI, rather than the full restart. Can also be used to map stochastic_physics output to the tile or vector. Stochastic physics output files depend on the layout, with 1x1 layout giving one file per tile. - -Mike Barlage, Clara Draper. Dec 2021. 
- -To compile on hera: - ->configure - - choose hera - - load the modules indicated - ->make - -To run: - ->vector2tile_converter.exe namelist.vector2tile - -the namelist defines the conversion direction and the paths of the files - -Details: - -the vector2tile pathway assumes that the vector file exists in the vector_restart_path directory and overwrites/creates tile files in the output_path - -the tile2vector pathway is a little tricky, it assumes the tile files exist in tile_restart_path and overwrites only the snow variables in the vector file in the output_path. If the vector file does not exist in output_path the process will fail. - -the overall assumption here is that we will have a full model vector restart file, then convert the vector to tiles for only snow variables, then convert the updated snow variables back to the full vector restart file - -For the lndp2vector or lndp2tile option, a new file will be created with the pertbations and lat lon only. - diff --git a/vector2tile/cmake/compiler_flags_GNU_CXX.cmake b/vector2tile/cmake/compiler_flags_GNU_CXX.cmake deleted file mode 100755 index dbe13c02..00000000 --- a/vector2tile/cmake/compiler_flags_GNU_CXX.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# (C) Copyright 2018 UCAR. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- -#################################################################### -# FLAGS COMMON TO ALL BUILD TYPES -#################################################################### - -if( HAVE_OMP ) - set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-deprecated-declarations -fopenmp") -else( ) - set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-deprecated-declarations -fno-openmp") -endif( ) - -#################################################################### -# RELEASE FLAGS -#################################################################### - -set( CMAKE_CXX_FLAGS_RELEASE "-O3" ) - -#################################################################### -# DEBUG FLAGS -#################################################################### - -set( CMAKE_CXX_FLAGS_DEBUG "-O0 -g" ) - -#################################################################### -# BIT REPRODUCIBLE FLAGS -#################################################################### - -set( CMAKE_CXX_FLAGS_BIT "-O2" ) - -#################################################################### -# LINK FLAGS -#################################################################### - -set( CMAKE_CXX_LINK_FLAGS "" ) - -#################################################################### - -# Meaning of flags -# ---------------- -# todo - diff --git a/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake b/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake deleted file mode 100755 index c0b618cc..00000000 --- a/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake +++ /dev/null @@ -1,43 +0,0 @@ -# (C) Copyright 2018 UCAR. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- -#################################################################### -# FLAGS COMMON TO ALL BUILD TYPES -#################################################################### - -set( CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fdefault-real-8 -fdefault-double-8 -Waliasing -fcray-pointer -fconvert=big-endian -ffree-line-length-none -fno-range-check -fbacktrace") - -#################################################################### -# RELEASE FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_RELEASE "-O3" ) - -#################################################################### -# DEBUG FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -fbounds-check -ffpe-trap=invalid,zero,overflow" ) - -#################################################################### -# BIT REPRODUCIBLE FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_BIT "-O2 -fbounds-check" ) - -#################################################################### -# LINK FLAGS -#################################################################### - -set( CMAKE_Fortran_LINK_FLAGS "" ) - -#################################################################### - -# Meaning of flags -# ---------------- -# -fstack-arrays : Allocate automatic arrays on the stack (needs large stacksize!!!) -# -funroll-all-loops : Unroll all loops -# -fcheck=bounds : Bounds checking - diff --git a/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake_test b/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake_test deleted file mode 100755 index 95f6209b..00000000 --- a/vector2tile/cmake/compiler_flags_GNU_Fortran.cmake_test +++ /dev/null @@ -1,45 +0,0 @@ -# (C) Copyright 2018 UCAR. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- -#################################################################### -# FLAGS COMMON TO ALL BUILD TYPES -#################################################################### - -#jkim set( CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fdefault-real-8 -fdefault-double-8 -Waliasing -fcray-pointer -fconvert=big-endian -ffree-line-length-none -fno-range-check -fbacktrace") -set( CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fconvert=big-endian -fbacktrace -ffree-line-length-none -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -ffree-form") - -#################################################################### -# RELEASE FLAGS -#################################################################### - -#jkim set( CMAKE_Fortran_FLAGS_RELEASE "-O3" ) -set( CMAKE_Fortran_FLAGS_RELEASE " " ) - -#################################################################### -# DEBUG FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -fbounds-check -ffpe-trap=invalid,zero,overflow" ) - -#################################################################### -# BIT REPRODUCIBLE FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_BIT "-O2 -fbounds-check" ) - -#################################################################### -# LINK FLAGS -#################################################################### - -set( CMAKE_Fortran_LINK_FLAGS "" ) - -#################################################################### - -# Meaning of flags -# ---------------- -# -fstack-arrays : Allocate automatic arrays on the stack (needs large stacksize!!!) 
-# -funroll-all-loops : Unroll all loops -# -fcheck=bounds : Bounds checking - diff --git a/vector2tile/cmake/compiler_flags_Intel_Fortran.cmake b/vector2tile/cmake/compiler_flags_Intel_Fortran.cmake deleted file mode 100755 index 7eef4052..00000000 --- a/vector2tile/cmake/compiler_flags_Intel_Fortran.cmake +++ /dev/null @@ -1,41 +0,0 @@ -# (C) Copyright 2018 UCAR. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. - -#################################################################### -# FLAGS COMMON TO ALL BUILD TYPES -#################################################################### - -set( CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -convert big_endian -fno-alias -auto -safe-cray-ptr -ftz -assume byterecl -i4 -r8 -nowarn -sox -traceback -msse2" ) - -#################################################################### -# RELEASE FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_RELEASE "-O3 -debug minimal -fp-model source" ) - -#################################################################### -# DEBUG FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_DEBUG "-g -O0 -check -check noarg_temp_created -check nopointer -warn -warn noerrors -fpe0 -ftrapuv" ) - -#################################################################### -# BIT REPRODUCIBLE FLAGS -#################################################################### - -set( CMAKE_Fortran_FLAGS_BIT "-O2 -debug minimal -fp-model source" ) - -#################################################################### -# LINK FLAGS -#################################################################### - -set( CMAKE_Fortran_LINK_FLAGS "" ) - -#################################################################### - -# Meaning of flags -# ---------------- -# todo - diff --git 
a/vector2tile/cmake/vector2tile_compiler_flags.cmake b/vector2tile/cmake/vector2tile_compiler_flags.cmake deleted file mode 100755 index 8aebfaec..00000000 --- a/vector2tile/cmake/vector2tile_compiler_flags.cmake +++ /dev/null @@ -1,23 +0,0 @@ -# (C) Copyright 2018 UCAR. -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. - -#add_definitions ( -Duse_libMPI -Duse_netCDF -DSPMD -DNXGLOB=4) -add_definitions ( -Duse_libMPI -Duse_netCDF -Dncdf -Dgather_scatter_barrier) - -####################################################################################### -# Fortran -####################################################################################### - -if( CMAKE_Fortran_COMPILER_ID MATCHES "GNU" ) - include( compiler_flags_GNU_Fortran ) -elseif( CMAKE_Fortran_COMPILER_ID MATCHES "Intel" ) - include( compiler_flags_Intel_Fortran ) -elseif( CMAKE_Fortran_COMPILER_ID MATCHES "XL" ) - include( compiler_flags_XL_Fortran ) -elseif( CMAKE_Fortran_COMPILER_ID MATCHES "Cray" ) - include( compiler_flags_Cray_Fortran ) -else() - message( STATUS "Fortran compiler with ID ${CMAKE_Fortran_COMPILER_ID} will be used with CMake default options") -endif() diff --git a/vector2tile/config/user_build_config.gfortran.serial b/vector2tile/config/user_build_config.gfortran.serial deleted file mode 100644 index a7191d5c..00000000 --- a/vector2tile/config/user_build_config.gfortran.serial +++ /dev/null @@ -1,11 +0,0 @@ - -#=============================================================================== -# Placeholder options for Mac gfortran -#=============================================================================== - - COMPILERF90 = /opt/local/bin/gfortran-mp-11 - FREESOURCE = - F90FLAGS = - NETCDFMOD = -I/opt/local/include - NETCDFLIB = -L/opt/local/lib -lnetcdf -lnetcdff - diff --git a/vector2tile/config/user_build_config.hera b/vector2tile/config/user_build_config.hera deleted 
file mode 100644 index 6ed29784..00000000 --- a/vector2tile/config/user_build_config.hera +++ /dev/null @@ -1,10 +0,0 @@ - -#=============================================================================== -# Placeholder options for hera -#=============================================================================== - - COMPILERF90 = ifort - FREESOURCE = - F90FLAGS = - NETCDFMOD = -I/apps/netcdf/4.7.0/intel/18.0.5.274/include - NETCDFLIB = -L/apps/netcdf/4.7.0/intel/18.0.5.274/lib -lnetcdf -lnetcdff diff --git a/vector2tile/configure b/vector2tile/configure deleted file mode 100755 index 24aad730..00000000 --- a/vector2tile/configure +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/perl - - if($#ARGV == 0) { - $response = shift(@ARGV) ; - print("Configure soil test environment: $response \n"); - }else { - print "Please select from following predefined environments: \n\n"; - - print " 1. hera \n"; - print " 2. orion \n"; - print " 3. gfortran compiler serial \n"; - print " 0. exit only \n"; - - printf "\nEnter selection : " ; - - $response = ; - chop($response); - } - - if ($response == 1) { - # Hera settings - system "cp config/user_build_config.hera user_build_config"; - print "\n load necessary modules:\n"; - print "\n module load 2022.2.0 netcdf/4.7.0 \n\n\n"; - } - - elsif ($response == 2) { - # Orion settings - system "cp config/user_build_config.orion user_build_config"; - print "\n load necessary modules:\n"; - print "\n module load intel netcdf \n\n\n"; - } - - elsif ($response == 3) { - # Mike's Mac settings - system "cp config/user_build_config.gfortran.serial user_build_config"; - } - - else {print "no selection $response\n"; last} diff --git a/vector2tile/namelist.lndp2vector b/vector2tile/namelist.lndp2vector deleted file mode 100644 index 92ade568..00000000 --- a/vector2tile/namelist.lndp2vector +++ /dev/null @@ -1,52 +0,0 @@ -&run_setup - -!------------------- common ------------------- -! 
Direction of conversion: either "vector2tile" or "tile2vector" for restart file -! "lndp2tile" or "lndp2vector" for perturbation - - direction = "lndp2vector" - -! FV3 resolution and path to oro files for restart/perturbation conversion - - tile_size = 96 - tile_path = "/scratch1/NCEPDEV/stmp2/Michael.Barlage/models/vector/v2t_data/tile_files/C96.mx100_frac/" - tile_fstub = "oro_C96.mx100" - -!------------------- only restart conversion ------------------- -! Time stamp for conversion for restart conversion - - restart_date = "2019-09-30 23:00:00" - -! Path for static file - static_filename="/scratch1/NCEPDEV/stmp2/Michael.Barlage/forcing/C96/static/ufs-land_C96_static_fields.nc" - -! Location of vector restart file (vector2tile direction) - - vector_restart_path = "junk" - -! Location of tile restart files (tile2vector direction) - - tile_restart_path = "junk" - -! Path for converted files; if same as tile/vector path, files may be overwritten - - output_path = "./" - -!------------------- only perturbation mapping ------------------- -! layout, options: 1x1, 1x4, 4x1, 2x2, an input settings for generating the perturbation file - - lndp_layout = "1x1" - -! input files - - lndp_input_file = "./stochy_out1/workg_T162_984x488.tile01.nc" - -! output files - - lndp_output_file = "./output_ens001.nc" - -! land perturbation variable list - - lndp_var_list='vgf' - -/ diff --git a/vector2tile/namelist.vector2tile b/vector2tile/namelist.vector2tile deleted file mode 100644 index a7a0b680..00000000 --- a/vector2tile/namelist.vector2tile +++ /dev/null @@ -1,52 +0,0 @@ -&run_setup - -!------------------- common ------------------- -! Direction of conversion: either "vector2tile" or "tile2vector" for restart file -! "lndp2tile" or "lndp2vector" for perturbation - - direction = "vector2tile" - -! 
FV3 resolution and path to oro files for restart/perturbation conversion - - tile_size = 96 - tile_path = "/scratch1/NCEPDEV/stmp2/Michael.Barlage/models/vector/v2t_data/tile_files/C96.mx100_frac/" - tile_fstub = "oro_C96.mx100" - -!------------------- only restart conversion ------------------- -! Time stamp for conversion for restart conversion - - restart_date = "2019-09-30 23:00:00" - -! Path for static file - static_filename="/scratch1/NCEPDEV/stmp2/Michael.Barlage/forcing/C96/static/ufs-land_C96_static_fields.nc" - -! Location of vector restart file (vector2tile direction) - - vector_restart_path = "/scratch1/NCEPDEV/stmp2/Michael.Barlage/models/vector/v2t_data/restart/" - -! Location of tile restart files (tile2vector direction) - - tile_restart_path = "/scratch1/NCEPDEV/stmp2/Michael.Barlage/models/vector/v2t_data/workshop/" - -! Path for converted files; if same as tile/vector path, files may be overwritten - - output_path = "/scratch1/NCEPDEV/stmp2/Michael.Barlage/models/vector/v2t_data/workshop/" - -!------------------- only perturbation mapping ------------------- -! layout, options: 1x4, 4x1, 2x2, an input settings for generating the perturbation file - - lndp_layout = "1x4" - -! input files - - lndp_input_file = "/scratch2/NCEPDEV/land/data/DA/ensemble_pert/workg_T162_984x488.tileXX.nc" - -! output files - - lndp_output_file = "./output.nc" - -! 
land perturbation variable list - - lndp_var_list='vgf','smc' - -/ diff --git a/vector2tile/namelist_mod.f90 b/vector2tile/namelist_mod.f90 deleted file mode 100644 index 4fbca14b..00000000 --- a/vector2tile/namelist_mod.f90 +++ /dev/null @@ -1,86 +0,0 @@ -module namelist_mod - implicit none - - integer, parameter :: max_n_var_lndp = 20 - type namelist_type - character*256 :: namelist_name = "" - character*11 :: direction = "" - character*256 :: tile_path = "" - character*256 :: tile_fstub = "" - integer :: tile_size - character*19 :: restart_date = "" - character*256 :: vector_restart_path = "" - character*256 :: tile_restart_path = "" - character*256 :: output_path = "" - character*256 :: static_filename = "" - character*3 :: lndp_layout = "" - character*256 :: lndp_input_file = "" - character*256 :: lndp_output_file = "" - character(len=128) :: lndp_var_list(max_n_var_lndp) - integer :: n_var_lndp - end type namelist_type - -contains - - subroutine ReadNamelist(namelist) - - type(namelist_type) :: namelist - character*11 :: direction - character*256 :: tile_path - character*256 :: tile_fstub - integer :: tile_size - character*19 :: restart_date - character*256 :: vector_restart_path - character*256 :: tile_restart_path - character*256 :: output_path - character*256 :: static_filename - character*3 :: lndp_layout - character*256 :: lndp_input_file - character*256 :: lndp_output_file - character(len=128) :: lndp_var_list(max_n_var_lndp) - integer :: n_var_lndp - integer :: k - - namelist / run_setup / direction, tile_path, tile_fstub, tile_size, restart_date, vector_restart_path, & - tile_restart_path, output_path, static_filename, lndp_layout, & - lndp_input_file, lndp_output_file, lndp_var_list, n_var_lndp - - lndp_var_list = 'XXX' - - open(30, file=namelist%namelist_name, form="formatted") - read(30, run_setup) - close(30) - - namelist%direction = direction - namelist%tile_path = tile_path - namelist%tile_fstub = tile_fstub - namelist%tile_size = tile_size - 
namelist%restart_date = restart_date - namelist%vector_restart_path = vector_restart_path - namelist%tile_restart_path = tile_restart_path - namelist%output_path = output_path - namelist%static_filename = static_filename - - namelist%lndp_layout = lndp_layout - namelist%lndp_input_file = lndp_input_file - namelist%lndp_output_file = lndp_output_file - - n_var_lndp= 0 - do k =1,size(lndp_var_list) - if (trim(lndp_var_list(k)) .EQ. 'XXX') then - cycle - else - n_var_lndp=n_var_lndp+1 - namelist%lndp_var_list(n_var_lndp) = lndp_var_list(k) - endif - enddo - namelist%n_var_lndp = n_var_lndp - if (n_var_lndp > max_n_var_lndp) then - print*, 'ERROR: land perturbation requested for too many parameters', & - 'increase max_n_var_lndp' - stop 10 - endif - - end subroutine ReadNamelist - -end module namelist_mod diff --git a/vector2tile/pull_request_template.md b/vector2tile/pull_request_template.md deleted file mode 100644 index 8eae0621..00000000 --- a/vector2tile/pull_request_template.md +++ /dev/null @@ -1,32 +0,0 @@ -PR Instructions: -1. Provide details under all headings below. -2. Assign Clara and one other person as reviewers. -3. If the PR is not ready for merging, add the "DRAFT/DO NOT MERGE" label. -4. When a PR is ready to merge, remove the "DRAFT/DO NOT MERGE" and email Clara. -5. Before requesting that the PR be merged, complete the checklist below. - -Notes on preparing PR, using git can be found in README_git - -## Describe your changes -Summarise all code changes included in PR: - -List any associated PRs in the submodules. - - -## Issue ticket number and link -List the git Issue that this PR addresses: - - -## Test output -Is this PR expected to pass the DA_IMS_test (ie., does it change the output)? - -Does it pass the DA_IMS_test? - -If changes to the test results are expected, what are these changes? 
Provide a link to the output directory when running the test: - -## Checklist before requesting a review -- [ ] My branch being merged is up to date with the latest develop. -- [ ] I have performed a self-review of my code by examining the differences that will be merged. -- [ ] I have not made any unnecessary code changes / changed any default behavior. -- [ ] My code passes the DA_IMS_test, or differences can be explained. - diff --git a/vector2tile/user_build_config b/vector2tile/user_build_config deleted file mode 100644 index 6ed29784..00000000 --- a/vector2tile/user_build_config +++ /dev/null @@ -1,10 +0,0 @@ - -#=============================================================================== -# Placeholder options for hera -#=============================================================================== - - COMPILERF90 = ifort - FREESOURCE = - F90FLAGS = - NETCDFMOD = -I/apps/netcdf/4.7.0/intel/18.0.5.274/include - NETCDFLIB = -L/apps/netcdf/4.7.0/intel/18.0.5.274/lib -lnetcdf -lnetcdff diff --git a/vector2tile/vector2tile_driver.f90 b/vector2tile/vector2tile_driver.f90 deleted file mode 100644 index 988d49d3..00000000 --- a/vector2tile/vector2tile_driver.f90 +++ /dev/null @@ -1,55 +0,0 @@ -program vector2tile_driver - - use namelist_mod - use vector2tile_restart_mod - use vector2tile_perturbation_mod - implicit none - - type(namelist_type) :: namelist - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Get namelist file name from command line -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call get_command_argument(1, namelist%namelist_name) - if(namelist%namelist_name == "") then - print *, "add namelist to the command line: " - stop 10 - endif - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read namelist information -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call ReadNamelist(namelist) - - print*, "conversion direction: ",namelist%direction - - if(namelist%direction /= "tile2vector" .and. namelist%direction /= "vector2tile" .and. & - namelist%direction /= "lndp2vector" .and. 
namelist%direction /= "lndp2tile") then - print*, "conversion direction: ",namelist%direction, " not recognized" - stop 10 - end if - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Decide the pathway -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - select case (namelist%direction) - - case ("tile2vector", "vector2tile") - - write(*,*) "Option: "//trim(namelist%direction) - call vector2tile_restart(namelist) - - case ("lndp2vector", "lndp2tile") - - write(*,*) "Option: "//trim(namelist%direction) - call mapping_perturbation(namelist) - - case default - - write(*,*) "choose a valid conversion direction" - - end select -end program vector2tile_driver diff --git a/vector2tile/vector2tile_perturbation_mod.f90 b/vector2tile/vector2tile_perturbation_mod.f90 deleted file mode 100644 index 1b0f977a..00000000 --- a/vector2tile/vector2tile_perturbation_mod.f90 +++ /dev/null @@ -1,347 +0,0 @@ -module vector2tile_perturbation_mod - - use netcdf - use namelist_mod - implicit none - -contains - - subroutine mapping_perturbation(namelist) - type(namelist_type) :: namelist - real, allocatable :: var_vector(:), lon_vector(:), lat_vector(:) - real, allocatable :: var_tile(:,:,:), lon_tile(:,:,:), lat_tile(:,:,:) - real, allocatable :: land_frac_tile(:,:,:) - character*256 :: vector_filename - character*256 :: tile_filename - character*256 :: input_filename - character*256 :: output_filename - character*2 :: tile1, tile2 - real, allocatable :: tmp2d(:,:) - integer :: filename_length - integer :: vector_length = 0 - integer :: layout_x, layout_y, nx, ny - integer :: itile, ix, iy, iloc, ivar - integer :: i, j, m, n, i1, i2, j1, j2, t2 - integer :: ncid, dimid, varid, status - integer :: dim_id_xdim, dim_id_ydim, dim_id_time - integer :: ncid_landp, ncid_vec, ncid_tile(6) - logical :: file_exists - - if(trim(namelist%lndp_layout) == '1x4') then - layout_x = 1 - layout_y = 4 - else if(trim(namelist%lndp_layout) == '2x2') then - layout_x = 2 - layout_y = 2 - else if(trim(namelist%lndp_layout) == '4x1') then - 
layout_x = 4 - layout_y = 1 - else if(trim(namelist%lndp_layout) == '1x1') then - layout_x = 1 - layout_y = 1 - else - print*, "layout: ",namelist%lndp_layout, " not recognized" - stop 10 - endif - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Allocate tile variables -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - allocate(var_tile( namelist%tile_size,namelist%tile_size,6)) - allocate(lon_tile( namelist%tile_size,namelist%tile_size,6)) - allocate(lat_tile( namelist%tile_size,namelist%tile_size,6)) - allocate(land_frac_tile(namelist%tile_size,namelist%tile_size,6)) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read FV3 tile information -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - do itile = 1, 6 - - if(namelist%tile_size < 100) then - write(tile_filename,'(a5,i2,a11,i1,a3)') "oro_C", namelist%tile_size, ".mx100.tile", itile, ".nc" - elseif(namelist%tile_size < 1000) then - write(tile_filename,'(a5,i3,a11,i1,a3)') "oro_C", namelist%tile_size, ".mx100.tile", itile, ".nc" - elseif(namelist%tile_size < 10000) then - write(tile_filename,'(a5,i4,a11,i1,a3)') "oro_C", namelist%tile_size, ".mx100.tile", itile, ".nc" - else - print *, "unknown tile size" - stop 10 - end if - - tile_filename = trim(namelist%tile_path)//trim(tile_filename) - - inquire(file=tile_filename, exist=file_exists) - - if(.not.file_exists) then - print*, trim(tile_filename), " does not exist" - print*, "In routine mapping_perturbation: check paths and file name" - stop 10 - end if - - status = nf90_open(tile_filename, NF90_NOWRITE, ncid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_inq_varid(ncid, "land_frac", varid) - status = nf90_get_var(ncid, varid , land_frac_tile(:,:,itile)) - - status = nf90_close(ncid) - - vector_length = vector_length + count(land_frac_tile(:,:,itile) > 0) - - end do - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Allocate vector variables -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - allocate(var_vector(vector_length)) - allocate(lon_vector(vector_length)) - allocate(lat_vector(vector_length)) - - nx = namelist%tile_size/layout_x - ny = namelist%tile_size/layout_y - - if(namelist%n_var_lndp > 0) then - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Define the output file -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - if(trim(namelist%direction) == "lndp2vector") then - - output_filename = namelist%lndp_output_file - status = nf90_create(output_filename, NF90_CLOBBER, ncid_vec) - if (status /= nf90_noerr) call handle_err(status) - -! Define dimensions in the file. - - status = nf90_def_dim(ncid_vec, "location", vector_length, dim_id_xdim) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid_vec, "Time", NF90_UNLIMITED, dim_id_time) - if (status /= nf90_noerr) call handle_err(status) - -! Define lat and lon - status = nf90_def_var(ncid_vec, 'lon_vec', & - NF90_FLOAT, (/dim_id_xdim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_var(ncid_vec, 'lat_vec', & - NF90_FLOAT, (/dim_id_xdim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - -! Define variables in the file. - do ivar = 1, namelist%n_var_lndp - status = nf90_def_var(ncid_vec, namelist%lndp_var_list(ivar), & - NF90_FLOAT, (/dim_id_xdim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - enddo - status = nf90_enddef(ncid_vec) - - else if(trim(namelist%direction) == "lndp2tile" ) then - - filename_length = len_trim(namelist%lndp_output_file) - do itile = 1, 6 - write(tile1,fmt='(I2.2)') itile - output_filename = trim(namelist%lndp_output_file(1:filename_length-5))//trim(tile1)//'.nc' - status = nf90_create(output_filename, NF90_CLOBBER, ncid_tile(itile)) - if (status /= nf90_noerr) call handle_err(status) - -! Define dimensions in the file. 
- - status = nf90_def_dim(ncid_tile(itile), "xaxis_1", namelist%tile_size , dim_id_xdim) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid_tile(itile), "yaxis_1", namelist%tile_size , dim_id_ydim) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid_tile(itile), "Time", NF90_UNLIMITED, dim_id_time) - if (status /= nf90_noerr) call handle_err(status) - -! Define lat and lon in the file - status = nf90_def_var(ncid_tile(itile), 'lon_tile', & - NF90_FLOAT, (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_var(ncid_tile(itile), 'lat_tile', & - NF90_FLOAT, (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - -! Define variables in the file. - do ivar = 1, namelist%n_var_lndp - status = nf90_def_var(ncid_tile(itile), namelist%lndp_var_list(ivar), & - NF90_FLOAT, (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - enddo - status = nf90_enddef(ncid_tile(itile)) - enddo - endif - - allocate(tmp2d(nx,ny)) - - do ivar = 1, namelist%n_var_lndp - t2 = 1 - filename_length = len_trim(namelist%lndp_input_file) - do itile = 1, 6 - i1=1 - i2=i1+nx-1 - j1=1 - j2=j1+ny-1 - do j=1,layout_y - do i=1,layout_x - write(tile2,fmt='(I2.2)') t2 - if(t2 > 1) then - i1=i1+nx - i2=i2+nx - if (i2 .GT. namelist%tile_size) then - i1=1 - i2=i1+nx-1 - endif - endif - input_filename = trim(namelist%lndp_input_file(1:filename_length-5))//trim(tile2)//'.nc' - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! Read the perturbation pattern - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - status = nf90_open(input_filename, NF90_NOWRITE, ncid_landp) - status = nf90_inq_varid(ncid_landp, namelist%lndp_var_list(ivar), varid) - if (status /= nf90_noerr) then - print *, trim(namelist%lndp_var_list(ivar))//' variable missing from perturbation file' - call handle_err(status) - endif - - status = nf90_get_var(ncid_landp, varid, tmp2d, start = (/1,1,1/), count = (/nx, ny, 1/)) - - do m = i1, i2 - do n = j1, j2 - var_tile(m,n,itile) = tmp2d(m-i1+1,n-j1+1) - enddo - enddo - - ! get the lat and lon - if ( ivar == 1 ) then - status = nf90_inq_varid(ncid_landp, 'grid_lon', varid) - if (status /= nf90_noerr) then - print *, trim('grid lon variable missing from perturbation file') - call handle_err(status) - endif - - status = nf90_get_var(ncid_landp, varid, tmp2d, start = (/1,1,1/), count = (/nx, ny, 1/)) - - do m = i1, i2 - do n = j1, j2 - lon_tile(m,n,itile) = tmp2d(m-i1+1,n-j1+1) - enddo - enddo - - ! get the lat and lat - status = nf90_inq_varid(ncid_landp, 'grid_lat', varid) - if (status /= nf90_noerr) then - print *, trim('grid lat variable missing from perturbation file') - call handle_err(status) - endif - - status = nf90_get_var(ncid_landp, varid, tmp2d, start = (/1,1,1/), count = (/nx, ny, 1/)) - - do m = i1, i2 - do n = j1, j2 - lat_tile(m,n,itile) = tmp2d(m-i1+1,n-j1+1) - enddo - enddo - - endif - - t2 = t2+1 - - enddo - - j1=j1+ny - j2=j2+ny - - if (j2 .GT. namelist%tile_size) then - j1=1 - j2=j1+ny-1 - endif - - enddo - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! Write the perturbation pattern for the tile files - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - if(trim(namelist%direction) == "lndp2tile") then - status = nf90_inq_varid(ncid_tile(itile), namelist%lndp_var_list(ivar), varid) - status = nf90_put_var(ncid_tile(itile), varid , var_tile(:,:,itile), & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - if (ivar==1) then - status = nf90_inq_varid(ncid_tile(itile), 'lon_grid', varid) - status = nf90_put_var(ncid_tile(itile), varid , lon_tile(:,:,itile), & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - status = nf90_inq_varid(ncid_tile(itile), 'lat_grid', varid) - status = nf90_put_var(ncid_tile(itile), varid , lat_tile(:,:,itile), & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - endif - endif - - enddo ! for each tile - - - iloc = 0 - do itile = 1, 6 - do j = 1, namelist%tile_size - do i = 1, namelist%tile_size - if(land_frac_tile(i,j,itile) > 0.0) then - iloc = iloc + 1 - var_vector(iloc) = var_tile(i,j,itile) - if (ivar==1) then - lon_vector(iloc) = lon_tile(i,j,itile) - lat_vector(iloc) = lat_tile(i,j,itile) - endif - endif - enddo - enddo - enddo ! for each tile - - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! - ! Write the perturbation pattern for the vector file - !!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - if(trim(namelist%direction) == "lndp2vector") then - status = nf90_inq_varid(ncid_vec, namelist%lndp_var_list(ivar), varid) - status = nf90_put_var(ncid_vec, varid , var_vector(:), & - start = (/1,1/), count = (/vector_length, 1/)) - - if (ivar == 1) then - status = nf90_inq_varid(ncid_vec, 'lon_vec', varid) - status = nf90_put_var(ncid_vec, varid , lon_vector(:), & - start = (/1,1/), count = (/vector_length, 1/)) - status = nf90_inq_varid(ncid_vec, 'lat_vec', varid) - status = nf90_put_var(ncid_vec, varid , lat_vector(:), & - start = (/1,1/), count = (/vector_length, 1/)) - endif - endif - - enddo ! for each variable - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Close the netcdf file -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - if(trim(namelist%direction) == "lndp2vector") then - status = nf90_close(ncid_vec) - else if(trim(namelist%direction) == "lndp2tile") then - do itile = 1, 6 - status = nf90_close(ncid_tile(itile)) - enddo - endif - endif - end subroutine mapping_perturbation - - subroutine handle_err(status) - use netcdf - integer, intent ( in) :: status - - if(status /= nf90_noerr) then - print *, trim(nf90_strerror(status)) - stop 10 - end if - end subroutine handle_err - -end module vector2tile_perturbation_mod diff --git a/vector2tile/vector2tile_restart_mod.f90 b/vector2tile/vector2tile_restart_mod.f90 deleted file mode 100644 index 1679294c..00000000 --- a/vector2tile/vector2tile_restart_mod.f90 +++ /dev/null @@ -1,929 +0,0 @@ -module vector2tile_restart_mod - - use namelist_mod - use netcdf - implicit none - - type vector_type - double precision, allocatable :: swe (:) - double precision, allocatable :: snow_depth (:) - double precision, allocatable :: active_snow_layers (:) - double precision, allocatable :: swe_previous (:) - double precision, allocatable :: snow_soil_interface(:,:) - double precision, allocatable :: temperature_snow (:,:) - double precision, allocatable :: snow_ice_layer (:,:) - double precision, allocatable :: snow_liq_layer (:,:) - double precision, allocatable :: temperature_soil (:,:) -! needed for IMSaggregate_mod - double precision, allocatable :: vegetation_type(:) -! needed by JEDI to mask out land-ice - double precision, allocatable :: soil_moisture_total(:,:) -! 
needed for JEDI QC of SMAP data - double precision, allocatable :: soil_moisture_liquid(:,:) - double precision, allocatable :: temperature_ground (:) - end type vector_type - - type tile_type - double precision, allocatable :: swe (:,:,:) - double precision, allocatable :: snow_depth (:,:,:) - double precision, allocatable :: active_snow_layers (:,:,:) - double precision, allocatable :: swe_previous (:,:,:) - double precision, allocatable :: snow_soil_interface(:,:,:,:) - double precision, allocatable :: temperature_snow (:,:,:,:) - double precision, allocatable :: snow_ice_layer (:,:,:,:) - double precision, allocatable :: snow_liq_layer (:,:,:,:) - double precision, allocatable :: temperature_soil (:,:,:,:) - real, allocatable :: land_frac (:,:,:) - double precision, allocatable :: soil_moisture_total(:,:,:,:) - double precision, allocatable :: vegetation_type(:,:,:) -! needed by add increments - double precision, allocatable :: slmsk (:, :, :) -! needed for JEDI QC of SMAP data - double precision, allocatable :: soil_moisture_liquid (:,:,:,:) - double precision, allocatable :: temperature_ground (:,:,:) - end type tile_type - -contains - - subroutine vector2tile_restart(namelist) - type(namelist_type) :: namelist - type(vector_type) :: vector - type(tile_type) :: tile - character*256 :: vector_filename - character*300 :: tile_filename - character*19 :: date - integer :: vector_length = 0 - integer :: yyyy,mm,dd,hh,nn,ss - integer :: itile, ix, iy, iloc - integer :: ncid, dimid, varid, status - logical :: file_exists - read(namelist%restart_date( 1: 4),'(i4.4)') yyyy - read(namelist%restart_date( 6: 7),'(i2.2)') mm - read(namelist%restart_date( 9:10),'(i2.2)') dd - read(namelist%restart_date(12:13),'(i2.2)') hh - read(namelist%restart_date(15:16),'(i2.2)') nn - read(namelist%restart_date(18:19),'(i2.2)') ss - - write(date,'(i4,a1,i2.2,a1,i2.2,a1,i2.2,a1,i2.2,a1,i2.2)') & - yyyy, "-", mm, "-", dd, "_", hh, "-", nn, "-", ss - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! 
Allocate tile variables -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - allocate(tile%swe (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%snow_depth (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%active_snow_layers (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%swe_previous (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%snow_soil_interface(namelist%tile_size,namelist%tile_size,7,6)) - allocate(tile%temperature_snow (namelist%tile_size,namelist%tile_size,3,6)) - allocate(tile%snow_ice_layer (namelist%tile_size,namelist%tile_size,3,6)) - allocate(tile%snow_liq_layer (namelist%tile_size,namelist%tile_size,3,6)) - allocate(tile%temperature_soil (namelist%tile_size,namelist%tile_size,4,6)) - allocate(tile%soil_moisture_total (namelist%tile_size,namelist%tile_size,4,6)) - allocate(tile%land_frac (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%slmsk (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%vegetation_type (namelist%tile_size,namelist%tile_size,6)) - allocate(tile%soil_moisture_liquid (namelist%tile_size,namelist%tile_size,4,6)) - allocate(tile%temperature_ground (namelist%tile_size,namelist%tile_size,6)) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read FV3 tile information -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - do itile = 1, 6 - write(tile_filename,'(a5,i1,a3)') ".tile", itile, ".nc" - - tile_filename = trim(namelist%tile_path)//trim(namelist%tile_fstub)//trim(adjustl(tile_filename)) - inquire(file=trim(tile_filename), exist=file_exists) - - if(.not.file_exists) then - print*, trim(tile_filename), " does not exist1" - print*, "Check paths and file name" - stop 10 - end if - - status = nf90_open(trim(tile_filename), NF90_NOWRITE, ncid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_inq_varid(ncid, "land_frac", varid) - status = nf90_get_var(ncid, varid , tile%land_frac(:,:,itile)) - - status = nf90_close(ncid) - - vector_length = vector_length + count(tile%land_frac(:,:,itile) > 0) - - end do - - print*, "The FV3 tiles report ",vector_length, "land grids" - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Allocate vector variables -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - allocate(vector%swe (vector_length)) - allocate(vector%snow_depth (vector_length)) - allocate(vector%active_snow_layers (vector_length)) - allocate(vector%swe_previous (vector_length)) - allocate(vector%snow_soil_interface(vector_length,7)) - allocate(vector%temperature_snow (vector_length,3)) - allocate(vector%snow_ice_layer (vector_length,3)) - allocate(vector%snow_liq_layer (vector_length,3)) - allocate(vector%temperature_soil (vector_length,4)) - allocate(vector%soil_moisture_total (vector_length,4)) - allocate(vector%vegetation_type (vector_length)) - allocate(vector%soil_moisture_liquid (vector_length,4)) - allocate(vector%temperature_ground (vector_length)) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Direction of transfer branch -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - if(namelist%direction == "vector2tile") then - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read vector restart file -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call ReadVectorRestart(namelist, date, vector, vector_length) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Transfer vector to tiles -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - ! explicitly initialize to 0. 
- tile%slmsk=0. - - iloc = 0 - do itile = 1, 6 - do iy = 1, namelist%tile_size - do ix = 1, namelist%tile_size - if(tile%land_frac(ix,iy,itile) > 0.0) then - iloc = iloc + 1 - tile%swe(ix,iy,itile) = vector%swe(iloc) - tile%vegetation_type(ix,iy,itile) = vector%vegetation_type(iloc) - tile%snow_depth(ix,iy,itile) = vector%snow_depth(iloc) - tile%active_snow_layers(ix,iy,itile) = vector%active_snow_layers(iloc) - tile%swe_previous(ix,iy,itile) = vector%swe_previous(iloc) - tile%snow_soil_interface(ix,iy,:,itile) = vector%snow_soil_interface(iloc,:) - tile%temperature_snow(ix,iy,:,itile) = vector%temperature_snow(iloc,:) - tile%snow_ice_layer(ix,iy,:,itile) = vector%snow_ice_layer(iloc,:) - tile%snow_liq_layer(ix,iy,:,itile) = vector%snow_liq_layer(iloc,:) - tile%temperature_soil(ix,iy,:,itile) = vector%temperature_soil(iloc,:) - tile%soil_moisture_total(ix,iy,:,itile) = vector%soil_moisture_total(iloc,:) - tile%slmsk(ix,iy,itile) = 1. - tile%soil_moisture_liquid(ix,iy,:,itile)= vector%soil_moisture_liquid(iloc,:) - tile%temperature_ground(ix,iy,itile) = vector%temperature_ground(iloc) - end if - - end do - end do - end do - - print*, "Transferred ",iloc, "land grids" - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Write FV3 tile file -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call WriteTileRestart(namelist, date, tile) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! tile2vector branch -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - elseif(namelist%direction == "tile2vector") then - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read tile restart files -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call ReadTileRestart(namelist, date, tile) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Transfer tile to vector -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - iloc = 0 - do itile = 1, 6 - do iy = 1, namelist%tile_size - do ix = 1, namelist%tile_size - - if(tile%land_frac(ix,iy,itile) > 0.0) then - iloc = iloc + 1 - vector%swe(iloc) = tile%swe(ix,iy,itile) - vector%snow_depth(iloc) = tile%snow_depth(ix,iy,itile) - vector%active_snow_layers(iloc) = tile%active_snow_layers(ix,iy,itile) - vector%swe_previous(iloc) = tile%swe_previous(ix,iy,itile) - vector%snow_soil_interface(iloc,:) = tile%snow_soil_interface(ix,iy,:,itile) - vector%temperature_snow(iloc,:) = tile%temperature_snow(ix,iy,:,itile) - vector%snow_ice_layer(iloc,:) = tile%snow_ice_layer(ix,iy,:,itile) - vector%snow_liq_layer(iloc,:) = tile%snow_liq_layer(ix,iy,:,itile) - vector%temperature_soil(iloc,:) = tile%temperature_soil(ix,iy,:,itile) - vector%soil_moisture_total(iloc,:) = tile%soil_moisture_total(ix,iy,:,itile) - vector%soil_moisture_liquid(iloc,:)= tile%soil_moisture_liquid(ix,iy,:,itile) - vector%temperature_ground(iloc) = tile%temperature_ground(ix,iy,itile) - end if - - end do - end do - end do - - print*, "Transferred ",iloc, "land grids" - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Write FV3 tile file -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call WriteVectorRestart(namelist, date, vector, vector_length) - - end if ! "vector2tile" or "tile2vector" branch - - end subroutine vector2tile_restart - - subroutine ReadVectorRestart(namelist, date, vector, vector_length) - - use netcdf - - type(namelist_type) :: namelist - type(vector_type) :: vector - character*19 :: date - integer :: vector_length - character*256 :: vector_filename, filename - integer :: ncid, dimid, varid, status - logical :: file_exists - -!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Create vector file name -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - write(vector_filename,'(a17,a19,a3)') "ufs_land_restart.", date, ".nc" - - filename = trim(namelist%vector_restart_path)//trim(vector_filename) - - inquire(file=filename, exist=file_exists) - - if(.not.file_exists) then - print*, trim(filename), " does not exist2" - print*, "Check paths and file name" - stop 10 - end if - - print*, "Reading vector file: ", trim(filename) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Check the vector length, fail if not consistent with tile-calculated length -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call ReadVectorLength(filename, vector_length) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read the vector fields -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - status = nf90_open(filename, NF90_NOWRITE, ncid) - - status = nf90_inq_varid(ncid, "snow_water_equiv", varid) - if (status /= nf90_noerr) then - print *, 'snow_water_equiv variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%swe , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "snow_depth", varid) - if (status /= nf90_noerr) then - print *, 'snow_depth variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%snow_depth , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "active_snow_levels", varid) - if (status /= nf90_noerr) then - print *, 'active_snow_levels variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%active_snow_layers , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "snow_water_equiv_old", varid) - if (status /= nf90_noerr) then - print *, 'snow_water_equiv_old variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%swe_previous, & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "temperature_snow", varid) - if (status /= 
nf90_noerr) then - print *, 'temperature_snow variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%temperature_snow , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "interface_depth", varid) - if (status /= nf90_noerr) then - print *, 'interface_depth variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%snow_soil_interface , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 7, 1/)) - - status = nf90_inq_varid(ncid, "snow_level_ice", varid) - if (status /= nf90_noerr) then - print *, 'snow_level_ice variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%snow_ice_layer , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "snow_level_liquid", varid) - if (status /= nf90_noerr) then - print *, 'snow_level_liquid variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%snow_liq_layer , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "temperature_soil", varid) - if (status /= nf90_noerr) then - print *, 'temperature_soil variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%temperature_soil , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4, 1/)) - - status = nf90_inq_varid(ncid, "soil_moisture_vol", varid) - if (status /= nf90_noerr) then - print *, 'soil_moisture_vol variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%soil_moisture_total , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4, 1/)) - - status = nf90_inq_varid(ncid, "soil_liquid_vol", varid) - if (status /= nf90_noerr) then - print *, 'soil_liquid_vol variable missing from vector file' 
- call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%soil_moisture_liquid , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4, 1/)) - - - status = nf90_inq_varid(ncid, "temperature_ground", varid) - if (status /= nf90_noerr) then - print *, 'temperature_ground variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%temperature_ground , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_close(ncid) - - ! read vegetation from static file - - filename = trim(namelist%static_filename) - - inquire(file=filename, exist=file_exists) - - if(.not.file_exists) then - print*, trim(filename), " does not exist3" - print*, "Check paths and file name" - stop 10 - end if - - status = nf90_open(filename, NF90_NOWRITE, ncid) - - status = nf90_inq_varid(ncid, "vegetation_category", varid) - if (status /= nf90_noerr) then - print *, 'vegetation_category missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , vector%vegetation_type, & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_close(ncid) - end subroutine ReadVectorRestart - - subroutine ReadTileRestart(namelist, date, tile) - - use netcdf - - type(namelist_type) :: namelist - type(tile_type) :: tile - character*19 :: date - character*256 :: tile_filename - integer :: ncid, dimid, varid, status - integer :: itile - logical :: file_exists - -!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Create tile file name -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - do itile = 1, 6 - write(tile_filename,'(a4,a2,a2,a1,a2,a18,i1,a3)') & - date(1:4), date(6:7), date(9:10),".",date(12:13), "0000.sfc_data.tile",itile,".nc" - - tile_filename = trim(namelist%tile_restart_path)//trim(tile_filename) - - inquire(file=tile_filename, exist=file_exists) - - if(.not.file_exists) then - print*, trim(tile_filename), " does not exist4" - print*, "Check paths and file name" - stop 10 - end if - - print*, "Reading tile file: ", trim(tile_filename) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Read the tile fields -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - status = nf90_open(tile_filename, NF90_NOWRITE, ncid) - if (status /= nf90_noerr) call handle_err(status) - -! Start reading restart file - - status = nf90_inq_varid(ncid, "sheleg", varid) - if (status /= nf90_noerr) then - print *, 'sheleg variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%swe(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "snwdph", varid) - if (status /= nf90_noerr) then - print *, 'snwdph variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%snow_depth(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "snowxy", varid) - if (status /= nf90_noerr) then - print *, 'snowxy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%active_snow_layers(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "sneqvoxy", varid) - if (status /= nf90_noerr) then - print *, 'sneqvoxy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%swe_previous(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = 
nf90_inq_varid(ncid, "zsnsoxy", varid) - if (status /= nf90_noerr) then - print *, 'zsnoxy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%snow_soil_interface(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 7, 1/)) - - status = nf90_inq_varid(ncid, "tsnoxy", varid) - if (status /= nf90_noerr) then - print *, 'tsnoxy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%temperature_snow(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "snicexy", varid) - if (status /= nf90_noerr) then - print *, 'snicexy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%snow_ice_layer(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "snliqxy", varid) - if (status /= nf90_noerr) then - print *, 'snliqxy variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%snow_liq_layer(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "stc", varid) - if (status /= nf90_noerr) then - print *, 'stc variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%temperature_soil(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 4, 1/)) - - status = nf90_inq_varid(ncid, "smc", varid) - if (status /= nf90_noerr) then - print *, 'smc variable missing from vector file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%soil_moisture_total(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, 
namelist%tile_size, 4, 1/)) - - status = nf90_inq_varid(ncid, "slc", varid) - if (status /= nf90_noerr) then - print *, 'slc variable missing from tile file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%soil_moisture_liquid(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 4, 1/)) - - status = nf90_inq_varid(ncid, "tgxy", varid) - if (status /= nf90_noerr) then - print *, 'tgxy variable missing from tile file' - call handle_err(status) - endif - status = nf90_get_var(ncid, varid , tile%temperature_ground(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_close(ncid) - - end do - - end subroutine ReadTileRestart - - subroutine WriteVectorRestart(namelist, date, vector, vector_length) - - use netcdf - - type(namelist_type) :: namelist - type(vector_type) :: vector - character*19 :: date - integer :: vector_length - character*256 :: vector_filename - integer :: ncid, dimid, varid, status - logical :: file_exists - -!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Create vector file name -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - write(vector_filename,'(a17,a19,a3)') "ufs_land_restart.", date, ".nc" - - vector_filename = trim(namelist%output_path)//trim(vector_filename) - - inquire(file=vector_filename, exist=file_exists) - - if(.not.file_exists) then - print*, trim(vector_filename), " does not exist5" - print*, "Check paths and file name" - stop 10 - end if - - print*, "Writing vector file: ", trim(vector_filename) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Check the vector length, fail if not consistent with tile-calculated length -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - call ReadVectorLength(vector_filename, vector_length) - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -! Write the vector fields -!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - status = nf90_open(vector_filename, NF90_WRITE, ncid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_inq_varid(ncid, "snow_water_equiv", varid) - status = nf90_put_var(ncid, varid , vector%swe , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "snow_depth", varid) - status = nf90_put_var(ncid, varid , vector%snow_depth , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "active_snow_levels", varid) - status = nf90_put_var(ncid, varid , vector%active_snow_layers , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "snow_water_equiv_old", varid) - status = nf90_put_var(ncid, varid , vector%swe_previous, & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_inq_varid(ncid, "temperature_snow", varid) - status = nf90_put_var(ncid, varid , vector%temperature_snow , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "interface_depth", varid) - status = nf90_put_var(ncid, varid , vector%snow_soil_interface , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 7, 1/)) - - status = nf90_inq_varid(ncid, "snow_level_ice", varid) - status = nf90_put_var(ncid, varid , vector%snow_ice_layer , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "snow_level_liquid", varid) - status = nf90_put_var(ncid, varid , vector%snow_liq_layer , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 3, 1/)) - - status = nf90_inq_varid(ncid, "temperature_soil", varid) - status = nf90_put_var(ncid, varid , vector%temperature_soil , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4/)) - - status = nf90_inq_varid(ncid, "soil_moisture_vol", varid) - status = nf90_put_var(ncid, varid , vector%soil_moisture_total , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4/)) - - status = nf90_inq_varid(ncid, "soil_liquid_vol", varid) - status = 
nf90_put_var(ncid, varid , vector%soil_moisture_liquid , & - start = (/1 , 1, 1/) , & - count = (/vector_length, 4/)) - - status = nf90_inq_varid(ncid, "temperature_ground", varid) - status = nf90_put_var(ncid, varid , vector%temperature_ground , & - start = (/1,1/), count = (/vector_length, 1/)) - - status = nf90_close(ncid) - - end subroutine WriteVectorRestart - - subroutine WriteTileRestart(namelist, date, tile) - - use netcdf - - type(namelist_type) :: namelist - type(tile_type) :: tile - character*19 :: date - character*256 :: tile_filename - integer :: itile - integer :: ncid, varid, status, i - integer :: dim_id_xdim, dim_id_ydim, dim_id_soil, dim_id_snow, dim_id_snso, dim_id_time - - do itile = 1, 6 - - !write(tile_filename,'(a17,a19,a5,i1,a3)') "ufs_land_restart.", date, ".tile", itile, ".nc" - write(tile_filename,'(a4,a2,a2,a1,a2,a18,i1,a3)') & - date(1:4), date(6:7), date(9:10),".",date(12:13), "0000.sfc_data.tile",itile,".nc" - - tile_filename = trim(namelist%output_path)//trim(tile_filename) - - print*, "Writing tile file: ", trim(tile_filename) - - status = nf90_create(tile_filename, NF90_CLOBBER, ncid) - if (status /= nf90_noerr) call handle_err(status) - -! Define dimensions in the file. - - status = nf90_def_dim(ncid, "xaxis_1" , namelist%tile_size , dim_id_xdim) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid, "yaxis_1" , namelist%tile_size , dim_id_ydim) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid, "zaxis_2" , 4 , dim_id_soil) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid, "zaxis_3" , 3 , dim_id_snow) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid, "zaxis_4" , 7 , dim_id_snso) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_def_dim(ncid, "Time" , NF90_UNLIMITED , dim_id_time) - if (status /= nf90_noerr) call handle_err(status) - -! 
define dimension variables (for JEDI) - - status = nf90_def_var(ncid, "Time", NF90_DOUBLE, & - (/dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "xaxis_1", NF90_DOUBLE, & - (/dim_id_xdim/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "yaxis_1", NF90_DOUBLE, & - (/dim_id_ydim/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "zaxis_2", NF90_DOUBLE, & - (/dim_id_soil/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "zaxis_3", NF90_DOUBLE, & - (/dim_id_snow/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "zaxis_4", NF90_DOUBLE, & - (/dim_id_snso/), varid) - if (status /= nf90_noerr) call handle_err(status) - - -! Define variables in the file. - - status = nf90_def_var(ncid, "sheleg", NF90_DOUBLE, & ! note: this is weasd in vector file. - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "snwdph", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "snowxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "sneqvoxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "zsnsoxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_snso,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "tsnoxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_snow,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "snicexy", NF90_DOUBLE, & - 
(/dim_id_xdim,dim_id_ydim,dim_id_snow,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "snliqxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_snow,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "stc", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_soil,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "smc", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_soil,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "slmsk", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "vtype", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "slc", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_soil,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_def_var(ncid, "tgxy", NF90_DOUBLE, & - (/dim_id_xdim,dim_id_ydim,dim_id_time/), varid) - if (status /= nf90_noerr) call handle_err(status) - - status = nf90_enddef(ncid) - - -! 
fill dimension variables - - status = nf90_inq_varid(ncid, "Time", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/1/) ) - - status = nf90_inq_varid(ncid, "xaxis_1", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/(i, i=1, namelist%tile_size)/) ) - - status = nf90_inq_varid(ncid, "yaxis_1", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/(i, i=1, namelist%tile_size)/) ) - - status = nf90_inq_varid(ncid, "zaxis_2", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/(i, i=1, 4)/) ) - - status = nf90_inq_varid(ncid, "zaxis_3", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/(i, i=1, 3)/) ) - - status = nf90_inq_varid(ncid, "zaxis_4", varid) - if (status /= nf90_noerr) call handle_err(status) - status = nf90_put_var(ncid, varid ,(/(i, i=1, 7)/) ) - -! 
Start writing restart file - - status = nf90_inq_varid(ncid, "sheleg", varid) - status = nf90_put_var(ncid, varid , tile%swe(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "snwdph", varid) - status = nf90_put_var(ncid, varid , tile%snow_depth(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "snowxy", varid) - status = nf90_put_var(ncid, varid , tile%active_snow_layers(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "sneqvoxy", varid) - status = nf90_put_var(ncid, varid , tile%swe_previous(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "zsnsoxy", varid) - status = nf90_put_var(ncid, varid , tile%snow_soil_interface(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 7, 1/)) - - status = nf90_inq_varid(ncid, "tsnoxy", varid) - status = nf90_put_var(ncid, varid , tile%temperature_snow(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "snicexy", varid) - status = nf90_put_var(ncid, varid , tile%snow_ice_layer(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "snliqxy", varid) - status = nf90_put_var(ncid, varid , tile%snow_liq_layer(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 3, 1/)) - - status = nf90_inq_varid(ncid, "stc", varid) - status = nf90_put_var(ncid, varid , tile%temperature_soil(:,:,:,itile) , & - start = (/1,1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 4, 1/)) - - status = nf90_inq_varid(ncid, "smc", varid) - status = nf90_put_var(ncid, varid , 
tile%soil_moisture_total(:,:,:,itile) , & - start = (/1,1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 4, 1/)) - -! include in output, so can be used to id which tile grid cells are being simulated - status = nf90_inq_varid(ncid, "slmsk", varid) - status = nf90_put_var(ncid, varid , tile%slmsk(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_inq_varid(ncid, "vtype", varid) - status = nf90_put_var(ncid, varid , tile%vegetation_type(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - -! include for JEDI QC of SMAP obs - status = nf90_inq_varid(ncid, "slc", varid) - status = nf90_put_var(ncid, varid , tile%soil_moisture_liquid(:,:,:,itile) , & - start = (/1 , 1 , 1, 1/), & - count = (/namelist%tile_size, namelist%tile_size, 4, 1/)) - - status = nf90_inq_varid(ncid, "tgxy", varid) - status = nf90_put_var(ncid, varid , tile%temperature_ground(:,:,itile) , & - start = (/1,1,1/), count = (/namelist%tile_size, namelist%tile_size, 1/)) - - status = nf90_close(ncid) - - end do - - end subroutine WriteTileRestart - - subroutine ReadVectorLength(filename, vector_length) - - use netcdf - - character*256 :: filename - integer :: vector_length - integer :: length_from_file - integer :: ncid, dimid, varid, status - - status = nf90_open(filename, NF90_NOWRITE, ncid) - - status = nf90_inq_dimid(ncid, "location", dimid) - status = nf90_inquire_dimension(ncid, dimid, len = length_from_file) - - status = nf90_close(ncid) - - if(vector_length /= length_from_file) then - print*, "number of land points in tiles not consistent with land model vector length" - stop 10 - else - print*, "number of land points in tiles consistent with land model vector length" - end if - - end subroutine ReadVectorLength - - subroutine handle_err(status) - use netcdf - integer, intent ( in) :: status - - if(status /= nf90_noerr) then - print *, trim(nf90_strerror(status)) - stop 10 - end if - end 
subroutine handle_err - -end module vector2tile_restart_mod