diff --git a/.github/DISCUSSION_TEMPLATE/2-questions.yml b/.github/DISCUSSION_TEMPLATE/2-questions.yml
new file mode 100644
index 000000000..1bb506f44
--- /dev/null
+++ b/.github/DISCUSSION_TEMPLATE/2-questions.yml
@@ -0,0 +1,70 @@
+labels: [question]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thanks for your interest in PMP! Please follow the template below to ensure the development team and community can help you effectively.
+
+ - type: checkboxes
+ id: checks
+ attributes:
+ label: Question criteria
+ description: Please confirm and check all the following options.
+ options:
+ - label: I added a descriptive title here.
+ required: true
+ - label: I searched the [PMP GitHub Discussions](https://github.com/PCMDI/pcmdi_metrics/discussions) to find a similar question and didn't find it.
+ required: true
+ - label: I searched the [PMP documentation](http://pcmdi.github.io/pcmdi_metrics/).
+ required: true
+
+ - type: textarea
+ id: your-question
+ attributes:
+ label: Describe your question
+ description: |
+ Please help the community help you. The more specific you can be, the easier it will be to help.
+ validations:
+ required: true
+
+ - type: textarea
+ id: possible-answers
+ attributes:
+ label: Are there any possible answers you came across?
+ description: |
+ This will help others determine if you're on the right track. Include links to pages you've researched (e.g., software docs, Stack Overflow posts).
+
+ - type: textarea
+ id: sample-code
+ attributes:
+ label: Minimal Complete Verifiable Example (MCVE)
+ description: |
+ Minimal, self-contained copy-pastable example that generates the question/issue if possible. Please be concise with code posted (e.g., module imports, publicly accessible files).
+ Examples that follow these guidelines are easier to parse. This section will be automatically formatted into code, so no need for markdown backticks.
+
+ See guidelines below on how to provide a good MCVE:
+
+ - [Minimal Complete Verifiable Examples](https://stackoverflow.com/help/mcve)
+ - [Craft Minimal Bug Reports](http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports)
+ render: python
+
+ - type: textarea
+ id: log-output
+ attributes:
+ label: Relevant log output
+ description: Please copy and paste any relevant output. This will be automatically formatted into code, so no need for markdown backticks.
+ render: python
+
+ - type: textarea
+ id: show-versions
+ attributes:
+ label: Environment
+ description: |
+ If an MCVE and log output were provided, share your PMP version and other information about your environment here.
+
+ - type: textarea
+ id: extra
+ attributes:
+ label: Anything else we need to know?
+ description: |
+ Please describe any other information you want to share.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..f72e03459
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,61 @@
+name: Bug Report
+description: File a bug report to help us improve PMP
+title: "[Bug]: "
+labels: ["Type: Bug"]
+assignees: []
+body:
+ - type: textarea
+ id: what-happened
+ attributes:
+ label: What happened?
+ description: |
+ Thanks for reporting a bug! Please describe what you were trying to get done.
+ Tell us what happened, what went wrong.
+ validations:
+ required: true
+
+ - type: textarea
+ id: what-did-you-expect-to-happen
+ attributes:
+ label: What did you expect to happen? Are there any possible answers you came across?
+ description: |
+ Describe what you expected to happen. Include links to pages you've researched (e.g., software docs, Stack Overflow posts).
+ validations:
+ required: false
+
+ - type: textarea
+ id: sample-code
+ attributes:
+ label: Minimal Complete Verifiable Example (MCVE)
+ description: |
+ Minimal, self-contained copy-pastable example that generates the issue if possible. Please be concise with code posted (e.g., module imports, publicly accessible files).
+ Bug reports that follow these guidelines are easier to diagnose, and so are often handled much more quickly. This section will be automatically formatted into code, so no need for markdown backticks.
+
+ See guidelines below on how to provide a good MCVE:
+
+ - [Minimal Complete Verifiable Examples](https://stackoverflow.com/help/mcve)
+ - [Craft Minimal Bug Reports](http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports)
+ render: python
+
+ - type: textarea
+ id: log-output
+ attributes:
+ label: Relevant log output
+ description: Please copy and paste any relevant output. This will be automatically formatted into code, so no need for markdown backticks.
+ render: python
+
+ - type: textarea
+ id: extra
+ attributes:
+ label: Anything else we need to know?
+ description: |
+ Please describe any other information you want to share.
+
+ - type: textarea
+ id: show-versions
+ attributes:
+ label: Environment
+ description: |
+ Please share some information about your environment.
+ validations:
+ required: true
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..c2816362f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,11 @@
+blank_issues_enabled: true
+contact_links:
+ - name: Questions (PMP)
+ url: https://github.com/PCMDI/pcmdi_metrics/discussions
+ about: |
+ Ask questions and discuss with other PMP community members here. Please
+ browse the PMP Discussions Forum or PMP documentation first before asking a
+ question to make sure it is not already answered. If you can't find an
+ answer, please include a self-contained reproducible example with your
+ question if possible. Thanks!
+
diff --git a/.github/ISSUE_TEMPLATE/documentation.yml b/.github/ISSUE_TEMPLATE/documentation.yml
new file mode 100644
index 000000000..fd5b1e767
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/documentation.yml
@@ -0,0 +1,15 @@
+name: Documentation Update
+description: Update PMP documentation
+title: "[Doc]: "
+labels: ["Type: Documentation"]
+assignees: []
+body:
+ - type: textarea
+ id: description
+ attributes:
+ label: Describe your documentation update
+ description: |
+ Concise description of why the documentation is being updated (e.g., missing content for new feature, typo)
+ If this is related to an issue or PR, please mention it.
+ validations:
+ required: true
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..70ef8982c
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,42 @@
+name: Feature Request
+description: Suggest an idea for PMP
+title: "[Feature]: "
+labels: ["Type: Enhancement"]
+assignees: []
+body:
+ - type: textarea
+ id: description
+ attributes:
+ label: Is your feature request related to a problem?
+ description: |
+ Please do a quick search of existing issues to make sure that this has not been asked before.
+ Please provide a clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+ validations:
+ required: true
+
+ - type: textarea
+ id: solution
+ attributes:
+ label: Describe the solution you'd like
+ description: |
+ A clear and concise description of what you want to happen.
+ validations:
+ required: false
+
+ - type: textarea
+ id: alternatives
+ attributes:
+ label: Describe alternatives you've considered
+ description: |
+ A clear and concise description of any alternative solutions or features you've considered.
+ validations:
+ required: false
+
+ - type: textarea
+ id: additional-context
+ attributes:
+ label: Additional context
+ description: |
+ Add any other context about the feature request here.
+ validations:
+ required: false
diff --git a/.vscode/pcmdi_metrics.code-workspace b/.vscode/pcmdi_metrics.code-workspace
new file mode 100644
index 000000000..93362f2d5
--- /dev/null
+++ b/.vscode/pcmdi_metrics.code-workspace
@@ -0,0 +1,53 @@
+// This file stores the pcmdi_metrics repository's VS Code workspace settings.
+// Simply open this file in VS Code and the editor will be configured automatically.
+// Workspace settings take precedence over your user settings.
+{
+ "folders": [
+ {
+ "path": ".."
+ }
+ ],
+ "settings": {
+ // ===================
+ // Editor settings
+ // ===================
+ "editor.formatOnSave": true,
+ // ===================
+ // Python settings
+ // ===================
+ "[python]": {
+ // editor.rulers: [comments, max line length, wrap line length],
+ // Black does not wrap comments.
+ "editor.rulers": [80, 88, 120],
+ "editor.wordWrap": "wordWrapColumn",
+ "editor.wordWrapColumn": 120,
+ "editor.defaultFormatter": "ms-python.black-formatter"
+ },
+ "black-formatter.importStrategy": "fromEnvironment",
+ // Code Formatting and Linting
+ // ---------------------------
+ "flake8.args": ["--config=setup.cfg"],
+ "flake8.importStrategy": "fromEnvironment",
+ // Type checking
+ // ---------------------------
+ "mypy-type-checker.args": ["--config=pyproject.toml"],
+ "mypy-type-checker.importStrategy": "fromEnvironment",
+ // Testing
+ // ---------------------------
+ "python.testing.unittestEnabled": false,
+ "python.testing.pytestEnabled": true,
+ // NOTE: Debugger doesn't work if pytest-cov is enabled, so set "--no-cov"
+ // https://github.com/microsoft/vscode-python/issues/693
+ "python.testing.pytestArgs": ["--no-cov"],
+ // ===================
+ // Extension settings
+ // ===================
+ "jupyter.notebookFileRoot": "${workspaceFolder}",
+ "autoDocstring.docstringFormat": "numpy",
+ "[restructuredtext]": {
+ "editor.rulers": [88, 120],
+ "editor.wordWrap": "wordWrapColumn",
+ "editor.wordWrapColumn": 120
+ }
+ }
+}
diff --git a/docs/_static/git-flow.svg b/docs/_static/git-flow.svg
new file mode 100644
index 000000000..593e5149f
--- /dev/null
+++ b/docs/_static/git-flow.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/docs/_static/pre-commit-flow.svg b/docs/_static/pre-commit-flow.svg
new file mode 100644
index 000000000..a0d663061
--- /dev/null
+++ b/docs/_static/pre-commit-flow.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/docs/contributing.rst b/docs/contributing.rst
new file mode 100644
index 000000000..329aa8435
--- /dev/null
+++ b/docs/contributing.rst
@@ -0,0 +1,344 @@
+.. highlight:: shell
+
+==================
+Contributing Guide
+==================
+
+Contributions are welcome and greatly appreciated! Every little bit helps, and credit will always be given.
+
+Types of Contributions
+----------------------
+
+PCMDI Metrics Package (PMP) includes issue templates based on the contribution type: https://github.com/PCMDI/pcmdi_metrics/issues/new/choose.
+Note, new contributions must be made under the Apache-2.0 with LLVM exception license.
+
+Bug Report
+~~~~~~~~~~
+
+Look through the `GitHub Issues`_ for bugs to fix. Any unassigned issues tagged with "Type: Bug" are open for implementation.
+
+Feature Request
+~~~~~~~~~~~~~~~
+
+Look through the `GitHub Issues`_ for feature suggestions. Any unassigned issues tagged with "Type: Enhancement" are open for implementation.
+
+If you are proposing a feature:
+
+* Explain in detail how it would work.
+* Keep the scope as narrow as possible, to make it easier to implement.
+* Remember that this is an open-source project, and that contributions are welcome :)
+
+Documentation Update
+~~~~~~~~~~~~~~~~~~~~
+
+Help improve PMP's documentation, whether that be the Sphinx documentation or the API docstrings.
+
+Community Discussion
+~~~~~~~~~~~~~~~~~~~~
+
+Take a look at the `GitHub Discussions`_ page to get involved, share ideas, or ask questions.
+
+.. _GitHub Issues: https://github.com/PCMDI/pcmdi_metrics/issues
+.. _GitHub Discussions: https://github.com/PCMDI/pcmdi_metrics/discussions
+
+Version Control
+---------------
+
+The repository uses branch-based (core team) and fork-based (external collaborators)
+Git workflows with tagged software releases.
+
+.. figure:: _static/git-flow.svg
+ :alt: Git Flow Diagram
+
+Guidelines
+~~~~~~~~~~
+
+1. ``main`` must always be deployable
+2. All changes are made through support branches
+3. Rebase with the latest ``main`` to avoid/resolve conflicts
+4. Make sure pre-commit quality assurance checks pass when committing (enforced in CI/CD build)
+5. Open a pull request early for discussion
+6. Once the CI/CD build passes and pull request is approved, squash and rebase your commits
+7. Merge pull request into ``main`` and delete the branch
+
+Things to Avoid
+~~~~~~~~~~~~~~~
+
+1. Don't merge in broken or commented out code
+2. Don't commit directly to ``main``
+
+ * There are branch-protection rules for ``main``
+
+3. Don't merge with conflicts. Instead, handle conflicts upon rebasing
+
+Source: https://gist.github.com/jbenet/ee6c9ac48068889b0912
+
+Pre-commit
+~~~~~~~~~~
+The repository uses the pre-commit package to manage pre-commit hooks.
+These hooks help with quality assurance standards by identifying simple issues
+at the commit level before submitting code reviews.
+
+.. figure:: _static/pre-commit-flow.svg
+ :alt: Pre-commit Flow Diagram
+
+ pre-commit Flow
+
+
+Get Started
+------------
+
+Ready to contribute? Here's how to set up PMP for local development.
+
+VS Code, the editor of choice
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We recommend using VS Code as your IDE because it is open-source and has great Python development support.
+
+Get VS Code here: https://code.visualstudio.com
+
+VS Code Setup
+^^^^^^^^^^^^^
+PMP includes a VS Code workspace file (``.vscode/pcmdi_metrics.code-workspace``). This file automatically configures your IDE with the quality assurance tools, code line-length rulers, and more.
+
+Make sure to follow the :ref:`Local Development` section below.
+
+Recommended VS Code Extensions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ * `Python <https://marketplace.visualstudio.com/items?itemName=ms-python.python>`_
+ * `Pylance <https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance>`_
+ * `Python Docstring Generator <https://marketplace.visualstudio.com/items?itemName=njpwerner.autodocstring>`_
+ * `Python Type Hint <https://marketplace.visualstudio.com/items?itemName=njqdev.vscode-python-typehint>`_
+ * `Better Comments <https://marketplace.visualstudio.com/items?itemName=aaron-bond.better-comments>`_
+ * `Jupyter <https://marketplace.visualstudio.com/items?itemName=ms-toolsai.jupyter>`_
+ * `Visual Studio IntelliCode <https://marketplace.visualstudio.com/items?itemName=VisualStudioExptTeam.vscodeintellicode>`_
+
+
+.. _Local Development:
+
+Local Development
+~~~~~~~~~~~~~~~~~
+
+1. Download and install Conda
+
+ Linux
+ ::
+
+ $ wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
+ $ bash ./Miniconda3-latest-Linux-x86_64.sh
+ Do you wish the installer to initialize Miniconda3 by running conda init? [yes|no] yes
+
+
+ MacOS
+ ::
+
+ $ wget https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
+ $ bash ./Miniconda3-latest-MacOSX-x86_64.sh
+ Do you wish the installer to initialize Miniconda3 by running conda init? [yes|no] yes
+
+2. Fork the ``pcmdi_metrics`` repo on GitHub.
+
+ - If you are a maintainer, you can clone and branch directly from the root repository here: https://github.com/PCMDI/pcmdi_metrics
+
+3. Clone your fork locally::
+
+ $ git clone git@github.com:your_name_here/pcmdi_metrics.git
+
+4. Open ``.vscode/pcmdi_metrics.code-workspace`` in VS Code
+
+
+5. Create and activate Conda development environment::
+
+ $ cd pcmdi_metrics
+ $ conda env create -f conda-env/dev.yml
+ $ conda activate pcmdi_metrics_dev
+
+6. Set the VS Code Python interpreter to ``pcmdi_metrics_dev``
+
+7. Install pre-commit::
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
+
+8. Create a branch for local development and make changes::
+
+ $ git checkout -b <branch-name>
+
+9. During or after making changes, check for formatting or linting issues using pre-commit::
+
+ # Step 10 (committing) performs this automatically on staged files
+ $ pre-commit run --all-files
+
+ Trim Trailing Whitespace.................................................Passed
+ Fix End of Files.........................................................Passed
+ Check Yaml...............................................................Passed
+ black....................................................................Passed
+ isort....................................................................Passed
+ flake8...................................................................Passed
+
+10. Commit your changes::
+
+ $ git add .
+ $ git commit -m '<commit message>'
+
+ Trim Trailing Whitespace.................................................Passed
+ Fix End of Files.........................................................Passed
+ Check Yaml...............................................................Passed
+ black....................................................................Passed
+ isort....................................................................Passed
+ flake8...................................................................Passed
+
+11. Make sure pre-commit QA checks pass. Otherwise, fix any caught issues.
+
+ - Most of the tools fix issues automatically so you just need to re-stage the files.
+ - flake8 and mypy issues must be fixed manually.
+
+12. Push changes::
+
+ $ git push origin <branch-name>
+
+13. Submit a pull request through the GitHub website.
+
+
+Pull Request Guidelines
+-----------------------
+
+Before you submit a pull request, check that it meets these guidelines:
+
+1. The pull request should include tests for new or modified code.
+2. Link issues to pull requests.
+3. If the pull request adds functionality, the docs should be updated. Put
+ your new functionality into a function with a docstring, and add the
+ feature to the list in README.rst.
+4. Squash and rebase commits for a clean and navigable Git history.
+
+When you open a pull request on GitHub, there is a template available for use.
+
+
+Style Guide
+-----------
+
+PMP integrates the Black code formatter for code styling. If you want to learn more, please read about it `here <https://black.readthedocs.io/en/stable/>`__.
+
+PMP also leverages `Python Type Annotations <https://docs.python.org/3/library/typing.html>`_ to help the project scale.
+`mypy <http://mypy-lang.org/>`_ performs optional static type checking through pre-commit.
+
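+For example, a minimal sketch of an annotated function that mypy can check (the function below is illustrative, not part of the PMP API)::
+
+    def scale_precip(rate: float, factor: float = 86400.0) -> float:
+        """Convert a precipitation rate from kg m-2 s-1 to mm day-1."""
+        return rate * factor
+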
+Testing
+-------
+
+Testing your local changes is important to ensure the long-term maintainability and extensibility of the project.
+Since PMP is an open-source library, we aim to prevent as many bugs as possible from reaching the end-user.
+
+To get started, here are guides on how to write tests using pytest:
+
+- https://docs.pytest.org/en/latest/
+- https://docs.python-guide.org/writing/tests/#py-test
+
+If a function is hard to test, it is usually a symptom of being too complex (high cyclomatic complexity).
+
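+For example, a small self-contained pytest module looks like this (the helper and its tests are illustrative, not part of PMP)::
+
+    # test_helpers.py
+    import pytest
+
+    def nearest_index(values, target):
+        """Return the index of the value closest to target."""
+        return min(range(len(values)), key=lambda i: abs(values[i] - target))
+
+    def test_nearest_index():
+        assert nearest_index([1000.0, 850.0, 700.0], 860.0) == 1
+
+    def test_nearest_index_empty():
+        with pytest.raises(ValueError):
+            nearest_index([], 860.0)
+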
+DOs for Testing
+~~~~~~~~~~~~~~~
+
+* *DO* write tests for new or refactored code
+* *DO* try to follow test-driven-development
+* *DO* use the Coverage reports to see lines of code that need to be tested
+* *DO* focus on simple, small, reusable modules for unit testing
+* *DO* cover as many edge cases as possible when testing
+
+DON'Ts for Testing
+~~~~~~~~~~~~~~~~~~
+
+* *DON'T* push or merge untested code
+* *DON'T* introduce tests that fail or produce warnings
+
+Documenting Code
+----------------
+
+If you are using VS Code, the `Python Docstring Generator <https://marketplace.visualstudio.com/items?itemName=njpwerner.autodocstring>`_ extension can be used to auto-generate a docstring snippet once a function/class has been written.
+If you want the extension to generate docstrings in Sphinx format, you must set the ``"autoDocstring.docstringFormat": "sphinx"`` setting under File > Preferences > Settings.
+
+Note that it is best to write the docstrings once you have fully defined the function/class, as then the extension will generate the full docstring.
+If you make any changes to the code after a docstring is generated, you will have to update the affected docstrings manually.
+
+More info on docstrings here: https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html
+
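+For example, a NumPy-style docstring (the default set by ``autoDocstring.docstringFormat`` in the bundled workspace file) looks like this on an illustrative function::
+
+    def land_fraction(mask, threshold=50.0):
+        """Compute the fraction of grid cells flagged as land.
+
+        Parameters
+        ----------
+        mask : numpy.ndarray
+            Land/sea mask expressed as percent land.
+        threshold : float, optional
+            Minimum percentage for a cell to count as land, by default 50.0.
+
+        Returns
+        -------
+        float
+            Fraction of cells at or above the threshold.
+        """
+        return float((mask >= threshold).mean())
+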
+DOs for Documenting Code
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+* *DO* explain **why** something is done, its purpose, and its goal. The code shows **how** it is done, so commenting on this can be redundant.
+* *DO* explain ambiguity or complexities to avoid confusion
+* *DO* embrace documentation as an integral part of the overall development process
+* *DO* treat documenting as code and follow principles such as *Don't Repeat Yourself* and *Easier to Change*
+
+DON'Ts for Documenting Code
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* *DON'T* write comments as a crutch for poor code
+* *DON'T* comment *every* function, data structure, or type declaration
+
+Developer Tips
+--------------
+
+* flake8 will warn you if the cyclomatic complexity of a function is too high (see the example below).
+
+ * https://github.com/PyCQA/mccabe
+
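+To check complexity manually, you can run flake8 with an explicit threshold (the value shown is illustrative)::
+
+    $ flake8 --max-complexity 10 pcmdi_metrics/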
+
+FAQs
+----
+
+.. _Why squash and rebase?:
+
+Why squash and rebase commits?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Before you merge a support branch back into ``main``, the branch is typically squashed down to a single buildable commit, and then rebased on top of the main repo's ``main`` branch.
+
+Why?
+
+* Ensures build passes from the commit
+* Cleans up Git history for easy navigation
+* Makes collaboration and review process more efficient
+* Makes handling conflicts from rebasing simple since you only have to deal with conflicted commits
+
+
+How do I squash and rebase commits?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* Use GitHub's Squash and Merge feature in the pull request
+
+ * You still need to rebase on the latest ``main`` if ``main`` is ahead of your branch.
+
+* Manually squash and rebase
+
+ 1. Sync your fork of ``main`` (aka ``origin``) with the root ``main`` (aka ``upstream``) ::
+
+ git checkout main
+ git rebase upstream/main
+ git push -f origin main
+
+ 2. Get the SHA of the commit OR number of commits to rebase to ::
+
+ git checkout <branch-name>
+ git log --graph --decorate --pretty=oneline --abbrev-commit
+
+ 3. Squash commits::
+
+ git rebase -i [SHA]
+
+ # OR
+
+ git rebase -i HEAD~[NUMBER OF COMMITS]
+
+ 4. Rebase branch onto ``main`` ::
+
+ git rebase main
+ git push -f origin <branch-name>
+
+ 5. Make sure your squashed commit messages are refined
+
+ 6. Force push to remote branch ::
+
+ git push -f origin <branch-name>
diff --git a/docs/index.rst b/docs/index.rst
index 62ebe58cd..ed1adb90e 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -78,6 +78,7 @@ BSD 3-Clause License. See `LICENSE
diff --git a/pcmdi_metrics/io/default_regions_define.py b/pcmdi_metrics/io/default_regions_define.py
index 4511e2c43..b5362a2a7 100755
--- a/pcmdi_metrics/io/default_regions_define.py
+++ b/pcmdi_metrics/io/default_regions_define.py
@@ -5,25 +5,29 @@ def load_regions_specs():
regions_specs = {
# Mean Climate
+ "global": {},
"NHEX": {"domain": {"latitude": (30.0, 90)}},
"SHEX": {"domain": {"latitude": (-90.0, -30)}},
"TROPICS": {"domain": {"latitude": (-30.0, 30)}},
- "global": {},
"90S50S": {"domain": {"latitude": (-90.0, -50)}},
"50S20S": {"domain": {"latitude": (-50.0, -20)}},
"20S20N": {"domain": {"latitude": (-20.0, 20)}},
"20N50N": {"domain": {"latitude": (20.0, 50)}},
"50N90N": {"domain": {"latitude": (50.0, 90)}},
+ "CONUS": {"domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}},
+ "land": {"value": 100},
"land_NHEX": {"value": 100, "domain": {"latitude": (30.0, 90)}},
"land_SHEX": {"value": 100, "domain": {"latitude": (-90.0, -30)}},
"land_TROPICS": {"value": 100, "domain": {"latitude": (-30.0, 30)}},
- "land": {"value": 100},
+ "land_CONUS": {"value": 100, "domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}},
+ "ocean": {"value": 0},
"ocean_NHEX": {"value": 0, "domain": {"latitude": (30.0, 90)}},
"ocean_SHEX": {"value": 0, "domain": {"latitude": (-90.0, -30)}},
"ocean_TROPICS": {"value": 0, "domain": {"latitude": (30.0, 30)}},
- "ocean": {"value": 0},
- "CONUS": {"domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}},
- "land_CONUS": {"value": 100, "domain": {"latitude": (24.7, 49.4), "longitude": (-124.78, -66.92)}},
+ "ocean_50S50N" : {"value":0.,'domain': {"latitude": (-50., 50)}},
+ "ocean_50S20S" : {"value":0.,'domain': {"latitude": (-50., -20)}},
+ "ocean_20S20N": {"value":0.,'domain': {"latitude": (-20., 20)}},
+ "ocean_20N50N" : {"value":0.,'domain': {"latitude": (20., 50)}},
# Modes of variability
"NAM": {"domain": {"latitude": (20.0, 90), "longitude": (-180, 180)}},
"NAO": {"domain": {"latitude": (20.0, 80), "longitude": (-90, 40)}},
diff --git a/pcmdi_metrics/io/region_from_file.py b/pcmdi_metrics/io/region_from_file.py
new file mode 100644
index 000000000..ce43aebd9
--- /dev/null
+++ b/pcmdi_metrics/io/region_from_file.py
@@ -0,0 +1,34 @@
+import geopandas as gpd
+import regionmask
+
+
+def region_from_file(data, rgn_path, attr, feature):
+    """Return data masked by a named feature from a region file.
+
+    Arguments:
+        data: xcdat/xarray dataset
+        rgn_path: str, path to the region file (e.g., a shapefile)
+        attr: str, name of the attribute (column) that holds the region names
+        feature: str, name of the region to select
+    """
+    lon = data["lon"].data
+    lat = data["lat"].data
+
+    print("Reading region from file.")
+    try:
+        regions_df = gpd.read_file(rgn_path)
+        regions = regionmask.from_geopandas(regions_df, names=attr)
+        mask = regions.mask(lon, lat)
+        # The mask cannot be matched by name, only by the index of the name
+        val = list(regions_df[attr]).index(feature)
+    except Exception as e:
+        print("Error in creating region subset from file:")
+        raise e
+
+    try:
+        masked_data = data.where(mask == val)
+    except Exception as e:
+        print("Error: Region selection failed.")
+        raise e
+
+    return masked_data
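+
+
+# Example usage (illustrative; the dataset, shapefile path, and attribute name
+# are hypothetical):
+#
+#     import xcdat
+#     ds = xcdat.open_dataset("pr_day_model.nc")
+#     ds_conus = region_from_file(ds, "regions.shp", "NAME", "CONUS")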
diff --git a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py
index 06633b1e0..ba542bdf1 100644
--- a/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py
+++ b/pcmdi_metrics/mean_climate/lib/create_mean_climate_parser.py
@@ -266,4 +266,13 @@ def create_mean_climate_parser():
required=False,
)
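+    # NOTE: type=bool converts any non-empty string (including "False") to True,
+    # so prefer setting "parallel" in a parameter file or omitting the flag
+    # to keep the default.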
+ parser.add_argument(
+ "--parallel",
+ type=bool,
+ dest="parallel",
+ default=False,
+ help="Option for running code in parallel mode: True / False (default)",
+ required=False,
+ )
+
return parser
diff --git a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py
index 9428f569a..b09c963c2 100644
--- a/pcmdi_metrics/mean_climate/lib/load_and_regrid.py
+++ b/pcmdi_metrics/mean_climate/lib/load_and_regrid.py
@@ -49,10 +49,41 @@ def load_and_regrid(data_path, varname, varname_in_file=None, level=None, t_grid
# level - extract a specific level if needed
if level is not None:
- level = level * 100 # hPa to Pa
- ds = ds.sel(plev=level)
- if debug:
- print('ds:', ds)
+        if not isinstance(level, (int, float)):
+            level = float(level)
+
+        # check vertical coordinate first
+        if 'plev' in list(ds.coords.keys()):
+            if ds.plev.units == 'Pa':
+                level = level * 100  # hPa to Pa
+            try:
+                ds = ds.sel(plev=level)
+            except Exception as ex:
+                print('WARNING:', ex)
+
+                nearest_level = find_nearest(ds.plev.values, level)
+
+                print(' Given level:', level)
+                print(' Selected nearest level from dataset:', nearest_level)
+
+                diff_percentage = abs(nearest_level - level) / level * 100
+                if diff_percentage < 0.1:  # acceptable if difference is less than 0.1%
+                    ds = ds.sel(plev=level, method='nearest')
+                    print(' Difference is in the acceptable range.')
+                else:
+                    print('ERROR: Difference between the two levels is too big!')
+                    return
+            if debug:
+                print('ds:', ds)
+                print('ds.plev.units:', ds.plev.units)
+        else:
+            print('ERROR: plev is not in the nc file. Check the vertical coordinate.')
+            print(' Coordinate keys in the nc file:', list(ds.coords.keys()))
+            print('ERROR: load and regrid cannot complete.')
+            return
# regrid
if regrid_tool == 'regrid2':
@@ -78,3 +109,10 @@ def load_and_regrid(data_path, varname, varname_in_file=None, level=None, t_grid
if debug:
print('ds_regridded:', ds_regridded)
return ds_regridded
+
+
+def find_nearest(array, value):
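+    """Return the element of array nearest to value (e.g., the closest pressure level)."""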
+    array = np.asarray(array)
+    idx = (np.abs(array - value)).argmin()
+    return array[idx]
+
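+# Example (illustrative): find_nearest(np.array([100000.0, 85000.0, 70000.0]), 86000.0)
+# returns 85000.0, the closest available level in Pa.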
diff --git a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py
index b614f3559..0e42f7ec7 100644
--- a/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py
+++ b/pcmdi_metrics/mean_climate/lib/mean_climate_metrics_to_json.py
@@ -21,10 +21,11 @@ def mean_climate_metrics_to_json(
for m in models_in_dict:
if m == model:
for ref in list(json_dict["RESULTS"][m].keys()):
- runs_in_model_dict = list(json_dict["RESULTS"][m][ref].keys())
- for r in runs_in_model_dict:
- if (r != run) and (run is not None):
- del json_dict["RESULTS"][m][ref][r]
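+                # skip the "units" entry, which stores metadata rather than runs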
+ if ref != "units":
+ runs_in_model_dict = list(json_dict["RESULTS"][m][ref].keys())
+ for r in runs_in_model_dict:
+ if (r != run) and (run is not None):
+ del json_dict["RESULTS"][m][ref][r]
else:
del json_dict["RESULTS"][m]
# Write selected dict to JSON
diff --git a/pcmdi_metrics/mean_climate/mean_climate_driver.py b/pcmdi_metrics/mean_climate/mean_climate_driver.py
index 5d41bca63..1a9fd6c62 100755
--- a/pcmdi_metrics/mean_climate/mean_climate_driver.py
+++ b/pcmdi_metrics/mean_climate/mean_climate_driver.py
@@ -20,6 +20,7 @@
mean_climate_metrics_to_json,
)
from pcmdi_metrics.variability_mode.lib import tree
+from pcmdi_metrics.variability_mode.lib import sort_human
parser = create_mean_climate_parser()
@@ -49,6 +50,7 @@
custom_obs = parameter.custom_observations
debug = parameter.debug
cmec = parameter.cmec
+parallel = parameter.parallel
if metrics_output_path is not None:
metrics_output_path = parameter.metrics_output_path.replace('%(case_id)', case_id)
@@ -59,15 +61,15 @@
diagnostics_output_path = diagnostics_output_path.replace('%(case_id)', case_id)
find_all_realizations = False
+first_realization_only = False
if realization is None:
realization = ""
- realizations = [realization]
elif isinstance(realization, str):
if realization.lower() in ["all", "*"]:
find_all_realizations = True
- realizations = "Search for all realizations!!"
- else:
- realizations = [realization]
+ elif realization.lower() in ["first", "first_only"]:
+ first_realization_only = True
+realizations = [realization]
if debug:
print('regions_specs (before loading internally defined):', regions_specs)
@@ -145,6 +147,9 @@
# -------------
# variable loop
# -------------
+if isinstance(vars, str):
+ vars = [vars]
+
for var in vars:
if '_' in var or '-' in var:
@@ -206,7 +211,7 @@
result_dict["RESULTS"][model][ref]["source"] = ref_dataset_name
- if find_all_realizations:
+ if find_all_realizations or first_realization_only:
test_data_full_path = os.path.join(
test_data_path,
filename_template).replace('%(variable)', varname).replace('%(model)', model).replace('%(model_version)', model).replace('%(realization)', '*')
@@ -215,6 +220,9 @@
realizations = []
for ncfile in ncfiles:
realizations.append(ncfile.split('/')[-1].split('.')[3])
+ realizations = sort_human(realizations)
+ if first_realization_only:
+ realizations = realizations[0:1]
print('realizations (after search): ', realizations)
for run in realizations:
@@ -279,7 +287,7 @@
if debug:
print('ds_test_tmp:', ds_test_tmp)
- ds_test_dict[region].to_netcdf('_'.join([var, 'model', model, run, region + '.nc']))
+ ds_test_dict[region].to_netcdf('_'.join([var, 'model', model, run, region, case_id + '.nc']))
if model == test_data_set[0] and run == realizations[0]:
ds_ref_dict[region].to_netcdf('_'.join([var, 'ref', region + '.nc']))
@@ -287,18 +295,18 @@
print('compute metrics start')
result_dict["RESULTS"][model][ref][run][region] = compute_metrics(varname, ds_test_dict[region], ds_ref_dict[region], debug=debug)
- # write individual JSON
- # --- single simulation, obs (need to accumulate later) / single variable
- json_filename_tmp = "_".join([model, var, target_grid, regrid_tool, "metrics", ref])
- mean_climate_metrics_to_json(
- os.path.join(metrics_output_path, var),
- json_filename_tmp,
- result_dict,
- model=model,
- run=run,
- cmec_flag=cmec,
- debug=debug
- )
+ # write individual JSON
+ # --- single simulation, obs (need to accumulate later) / single variable
+ json_filename_tmp = "_".join([var, model, run, target_grid, regrid_tool, "metrics", ref, case_id])
+ mean_climate_metrics_to_json(
+ os.path.join(metrics_output_path, var),
+ json_filename_tmp,
+ result_dict,
+ model=model,
+ run=run,
+ cmec_flag=cmec,
+ debug=debug
+ )
except Exception as e:
if debug:
@@ -306,12 +314,17 @@
print('error occured for ', model, run)
print(e)
- # write collective JSON --- all models / all obs / single variable
- json_filename = "_".join([var, target_grid, regrid_tool, "metrics"])
- mean_climate_metrics_to_json(
- metrics_output_path,
- json_filename,
- result_dict,
- cmec_flag=cmec,
- )
- print('pmp mean clim driver completed')
+ # ========================================================================
+ # Dictionary to JSON: collective JSON at the end of model_realization loop
+ # ------------------------------------------------------------------------
+ if not parallel:
+ # write collective JSON --- all models / all obs / single variable
+ json_filename = "_".join([var, target_grid, regrid_tool, "metrics", case_id])
+ mean_climate_metrics_to_json(
+ metrics_output_path,
+ json_filename,
+ result_dict,
+ cmec_flag=cmec,
+ )
+
+print('pmp mean clim driver completed')
diff --git a/pcmdi_metrics/mjo/lib/mjo_metric_calc.py b/pcmdi_metrics/mjo/lib/mjo_metric_calc.py
index ea6962071..49f9acc58 100644
--- a/pcmdi_metrics/mjo/lib/mjo_metric_calc.py
+++ b/pcmdi_metrics/mjo/lib/mjo_metric_calc.py
@@ -167,21 +167,35 @@ def mjo_metric_ewr_calculation(
if plot:
os.makedirs(outdir(output_type="graphics"), exist_ok=True)
fout = os.path.join(outdir(output_type="graphics"), output_filename)
- title = (
- mip.upper()
- + ": "
- + model
- + " ("
- + run
- + ") \n"
- + var.capitalize()
- + ", "
- + season
- + " "
- + str(startYear)
- + "-"
- + str(endYear)
- )
+        if model == 'obs':
+            header = " OBS (" + run + ") \n"
+        else:
+            header = mip.upper() + ": " + model + " (" + run + ") \n"
+        title = (
+            header
+            + var.capitalize()
+            + ", "
+            + season
+            + " "
+            + str(startYear)
+            + "-"
+            + str(endYear)
+        )
if cmmGrid:
title += ", common grid (2.5x2.5deg)"
plot_power(OEE, title, fout, ewr)
diff --git a/pcmdi_metrics/mjo/param/myParam_mjo.py b/pcmdi_metrics/mjo/param/myParam_mjo.py
index fddc93746..4456f66de 100644
--- a/pcmdi_metrics/mjo/param/myParam_mjo.py
+++ b/pcmdi_metrics/mjo/param/myParam_mjo.py
@@ -30,7 +30,7 @@ def find_latest(path):
# Observation
# -------------------------------------------------
reference_data_name = "GPCP-1-3"
-reference_data_path = "/p/user_pub/PCMDIobs/PCMDIobs2/atmos/day/pr/GPCP-1-3/gn/v20200707/pr_day_GPCP-1-3_BE_gn_v20200707_19961002-20170101.nc" # noqa
+reference_data_path = "/p/user_pub/PCMDIobs/obs4MIPs_legacy/PCMDIobs2/atmos/day/pr/GPCP-1-3/gn/v20200924/pr_day_GPCP-1-3_BE_gn_v20200924_19961002-20170101.nc" # noqa
varOBS = "pr"
ObsUnitsAdjust = (True, "multiply", 86400.0, "mm d-1") # kg m-2 s-1 to mm day-1
diff --git a/pcmdi_metrics/mjo/scripts/parallel_driver.py b/pcmdi_metrics/mjo/scripts/parallel_driver.py
index d43cc9db5..f8179a838 100755
--- a/pcmdi_metrics/mjo/scripts/parallel_driver.py
+++ b/pcmdi_metrics/mjo/scripts/parallel_driver.py
@@ -97,10 +97,10 @@
# =================================================
# Generates list of command
# -------------------------------------------------
-param_file = "../doc/myParam_mjo.py"
+param_file = "../param/myParam_mjo.py"
if debug:
- param_file = "../doc/myParam_test.py"
+ param_file = "../param/myParam_test.py"
print("number of models (debug mode):", len(models))
cmds_list = list()
@@ -148,7 +148,6 @@
for r, run in enumerate(runs_list):
# command line for queue
cmd = [
- "python",
"mjo_metrics_driver.py",
"-p",
param_file,
diff --git a/pcmdi_metrics/mjo/scripts/run.sh b/pcmdi_metrics/mjo/scripts/run.sh
index fcca06219..8911b0897 100755
--- a/pcmdi_metrics/mjo/scripts/run.sh
+++ b/pcmdi_metrics/mjo/scripts/run.sh
@@ -1,9 +1,6 @@
#!/bin/sh
set -a
-# grim: pmp_nightly_20190628
-# gates: cdat82_20191107_py37, pmp_nightly_20190912
-
#parallel=no
parallel=yes
@@ -17,7 +14,7 @@ mkdir -p log
if [ $parallel == no ]; then
echo 'parallel no'
for mip in $mips; do
- python -u mjo_metrics_driver.py -p ../doc/myParam_mjo.py --mip ${mip} >& log/log.${mip}.txt &
+ mjo_metrics_driver.py -p ../param/myParam_mjo.py --mip ${mip} >& log/log.${mip}.txt &
disown
done
elif [ $parallel == yes ]; then
@@ -25,7 +22,7 @@ elif [ $parallel == yes ]; then
modnames="all"
realization="all"
for mip in $mips; do
- python -u ./parallel_driver.py -p ../doc/myParam_mjo.py --mip ${mip} --num_workers $num_workers --modnames $modnames --realization $realization >& log/log.parallel.${mip}.txt &
+ python -u ./parallel_driver.py -p ../param/myParam_mjo.py --mip ${mip} --num_workers $num_workers --modnames $modnames --realization $realization >& log/log.parallel.${mip}.txt &
disown
done
fi
diff --git a/pcmdi_metrics/precip_variability/lib/lib_variability_across_timescales.py b/pcmdi_metrics/precip_variability/lib/lib_variability_across_timescales.py
index 5f54506c7..74352ec9a 100644
--- a/pcmdi_metrics/precip_variability/lib/lib_variability_across_timescales.py
+++ b/pcmdi_metrics/precip_variability/lib/lib_variability_across_timescales.py
@@ -63,7 +63,7 @@ def precip_variability_across_timescale(
# Write data (nc file)
outfilename = "PS_pr." + str(dfrq) + "_regrid.180x90_" + dat + ".nc"
custom_dataset = xr.merge([freqs, ps, rn, sig95])
- custom_dataset.to_netcdf(path=os.path.join(outdir(output_type="diagnostic_results"), outfilename))
+ custom_dataset.to_netcdf(path=os.path.join(outdir.replace("%(output_type)", "diagnostic_results"), outfilename))
# Power spectum of anomaly
freqs, ps, rn, sig95 = Powerspectrum(anom, nperseg, noverlap)
@@ -72,7 +72,7 @@ def precip_variability_across_timescale(
# Write data (nc file)
outfilename = "PS_pr." + str(dfrq) + "_regrid.180x90_" + dat + "_unforced.nc"
custom_dataset = xr.merge([freqs, ps, rn, sig95])
- custom_dataset.to_netcdf(path=os.path.join(outdir(output_type="diagnostic_results"), outfilename))
+ custom_dataset.to_netcdf(path=os.path.join(outdir.replace("%(output_type)", "diagnostic_results"), outfilename))
# Write data (json file)
psdmfm["RESULTS"][dat] = {}
@@ -83,7 +83,7 @@ def precip_variability_across_timescale(
"PS_pr." + str(dfrq) + "_regrid.180x90_area.freq.mean_" + dat + ".json"
)
JSON = pcmdi_metrics.io.base.Base(
- outdir(output_type="metrics_results"), outfilename
+ outdir.replace("%(output_type)", "metrics_results"), outfilename
)
JSON.write(
psdmfm,
diff --git a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py
index c606be499..45d4e4ee1 100644
--- a/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py
+++ b/pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py
@@ -3,8 +3,6 @@
import glob
import os
-from genutil import StringConstructor
-
from pcmdi_metrics.mean_climate.lib.pmp_parser import PMPParser
from pcmdi_metrics.precip_variability.lib import (
AddParserArgument,
@@ -34,13 +32,12 @@
# Create output directory
case_id = param.case_id
-outdir_template = param.process_templated_argument("results_dir")
-outdir = StringConstructor(
- str(outdir_template(output_type="%(output_type)", mip=mip, case_id=case_id))
-)
+outdir_template = param.results_dir
+outdir_template = outdir_template.replace("%(mip)", str(mip)).replace("%(case_id)", str(case_id))
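+# e.g., with mip="cmip6" and case_id="v20230101" (illustrative values),
+# ".../%(output_type)/%(mip)/%(case_id)" becomes ".../%(output_type)/cmip6/v20230101";
+# the %(output_type) token is filled in the loop below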
for output_type in ["graphics", "diagnostic_results", "metrics_results"]:
- os.makedirs(outdir(output_type=output_type), exist_ok=True)
- print(outdir(output_type=output_type))
+ outdir = outdir_template.replace("%(output_type)", output_type)
+ os.makedirs(outdir, exist_ok=True)
+ print(outdir)
# Check data in advance
file_list = sorted(glob.glob(os.path.join(modpath, mod)))
@@ -57,5 +54,5 @@
syr = prd[0]
eyr = prd[1]
precip_variability_across_timescale(
- file_list, syr, eyr, dfrq, mip, dat, var, fac, nperseg, noverlap, outdir, cmec
+ file_list, syr, eyr, dfrq, mip, dat, var, fac, nperseg, noverlap, outdir_template, cmec
)