From 1c048446dae7099809633ad5305dc0c3b7aaec1d Mon Sep 17 00:00:00 2001 From: Teagan King <98482480+TeaganKing@users.noreply.github.com> Date: Thu, 9 May 2024 10:23:11 -0600 Subject: [PATCH 01/23] Create linting.yaml --- .github/workflows/linting.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/linting.yaml diff --git a/.github/workflows/linting.yaml b/.github/workflows/linting.yaml new file mode 100644 index 0000000..d89171b --- /dev/null +++ b/.github/workflows/linting.yaml @@ -0,0 +1,16 @@ +name: code-style + +on: + push: + branches: 'main' + pull_request: + branches: '*' + +jobs: + linting: + name: 'pre-commit hooks' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.1 From f95ad20822690a325a3d028087172a168a9422e1 Mon Sep 17 00:00:00 2001 From: Teagan King <98482480+TeaganKing@users.noreply.github.com> Date: Thu, 9 May 2024 10:24:10 -0600 Subject: [PATCH 02/23] Create .pre-commit-config.yaml --- .pre-commit-config.yaml | 43 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..79319ff --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,43 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: debug-statements + - id: double-quote-string-fixer + - id: check-docstring-first + - id: check-json + + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black-jupyter + + - repo: https://github.com/asottile/reorder-python-imports + rev: v3.12.0 + hooks: + - id: reorder-python-imports + args: [--py38-plus, --add-import, 'from __future__ import annotations'] + + - repo: https://github.com/asottile/add-trailing-comma + rev: v3.1.0 + hooks: + - id: add-trailing-comma + + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.2 + hooks: + - id: pyupgrade + args: [--py38-plus] + + - repo: https://github.com/hhatto/autopep8 + rev: v2.1.0 + hooks: + - id: autopep8 + + - repo: https://github.com/PyCQA/flake8 + rev: 7.0.0 + hooks: + - id: flake8 From 0dc98dd6742805b84d9405820fb30e5c3a855b10 Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:00:22 -0600 Subject: [PATCH 03/23] remove trailing whitespace --- README.md | 2 +- docs/addingnotebookstocollection.md | 4 +- examples/coupled_model/config.yml | 23 +- ....FLTHIST_ne30.r328_gamma0.33_soae.001.yaml | 17 +- examples/nblibrary/ice/cice_vars.yml | 10 +- examples/nblibrary/ice/plot_diff.py | 162 ++++++------ examples/nblibrary/ice/vect_diff.py | 233 +++++++++++------- 7 files changed, 247 insertions(+), 204 deletions(-) diff --git a/README.md b/README.md index 945d681..7718e02 100644 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ or you can view `${CUPID_ROOT}/examples/coupled-model/computed_notebooks/quick-r Furthermore, to clear the `computed_notebooks` folder which was generated by the `cupid-run` and `cupid-build` commands, you can run the following command: ``` bash -$ cupid-clear +$ cupid-clear ``` This will clear the `computed_notebooks` folder which is at the location pointed to by the `run_dir` variable in the `config.yml` file. 
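The trailing-whitespace churn in this patch (and the formatting-only patches that follow) comes from the hooks configured in patch 02; the `code-style` workflow from patch 01 runs the same hook set in CI, so a failure there is reproducible locally. A minimal sketch of the local workflow, assuming `pre-commit` itself is already installed (e.g. `pip install pre-commit`):

``` bash
# Install the git hooks defined in .pre-commit-config.yaml
$ pre-commit install

# Run every configured hook against the whole repository,
# mirroring what the code-style workflow does on push / pull request
$ pre-commit run --all-files
```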
diff --git a/docs/addingnotebookstocollection.md b/docs/addingnotebookstocollection.md index bf54ddd..c4bee82 100644 --- a/docs/addingnotebookstocollection.md +++ b/docs/addingnotebookstocollection.md @@ -23,7 +23,7 @@ Generally, a good fit for a diagnostic notebook is one that reads in CESM output none: param_specific_to_this_nb: some_value another_param: another_value - + If you just want the notebook run once on one set of parameters, keep the `parameter_groups: none:` notation as above. If you want the notebook executed multiple times with different parameter sets, the notation would look like this: your_new_nb_name: @@ -39,5 +39,5 @@ Generally, a good fit for a diagnostic notebook is one that reads in CESM output 6. If you'd like your new notebook included in the final Jupyter Book, add it to the Jupyter Book table of contents (`book_toc`). See [Jupyter Book's documentation](https://jupyterbook.org/en/stable/structure/toc.html) for different things you can do with this. 7. Update your parameters. Parameters that are specific to just this notebook should go under `parameter_groups` in the notebook's entry under `compute_notebooks`. Global parameters that you want passed in to every notebook in the collection should go under `global_params`. When `CUPiD` executes your notebook, all of these parameters will get put in a new cell below the cell tagged `parameters` that you added in step 3. This means they will supercede the values of the parameters that you put in the cell above---the names, notation, etc. should match to make sure your notebook is able to find the variables it needs. - + 8. All set! Your collection can now be run and built with `cupid-run` and `cupid-build` as usual. diff --git a/examples/coupled_model/config.yml b/examples/coupled_model/config.yml index 121217e..b6ec15f 100644 --- a/examples/coupled_model/config.yml +++ b/examples/coupled_model/config.yml @@ -1,4 +1,3 @@ - ################## SETUP ################## ################ @@ -33,7 +32,7 @@ computation_config: ### notebook in NOTEBOOK CONFIG default_kernel_name: cupid-analysis - + ############# NOTEBOOK CONFIG ############# ############################ @@ -54,8 +53,8 @@ timeseries: case_name: 'b.e23_alpha16b.BLT1850.ne30_t232.054' atm: - vars: ['ACTNI', 'ACTNL', 'ACTREI','ACTREL','AODDUST'] - derive_vars: [] # {'PRECT':['PRECL','PRECC'], 'RESTOM':['FLNT','FSNT']} + vars: ['ACTNI', 'ACTNL', 'ACTREI', 'ACTREL', 'AODDUST'] + derive_vars: [] # {'PRECT':['PRECL', 'PRECC'], 'RESTOM':['FLNT', 'FSNT']} hist_str: 'h0' start_years: [2] end_years: [102] @@ -96,12 +95,12 @@ timeseries: compute_notebooks: # This is where all the notebooks you want run and their - ### parameters are specified. Several examples of different - ### types of notebooks are provided. + # parameters are specified. Several examples of different + # types of notebooks are provided. 
# The first key (here simple_no_params_nb) is the name of the - ### notebook from nb_path_root, minus the .ipynb - + # notebook from nb_path_root, minus the .ipynb + infrastructure: index: parameter_groups: @@ -128,7 +127,7 @@ compute_notebooks: native: 'mom6.h.native.????-??.nc' static: 'mom6.h.static.nc' oce_cat: /glade/u/home/gmarques/libs/oce-catalogs/reference-datasets.yml - + lnd: land_comparison: parameter_groups: @@ -161,7 +160,7 @@ compute_notebooks: book_toc: # See https://jupyterbook.org/en/stable/structure/configure.html for - ## complete documentation of Jupyter book construction options + # complete documentation of Jupyter book construction options format: jb-book @@ -171,12 +170,12 @@ book_toc: parts: # Parts group notebooks into different sections in the Jupyter book - ### table of contents, so you can organize different parts of your project. + # table of contents, so you can organize different parts of your project. - caption: Atmosphere # Each chapter is the name of one of the notebooks that you executed - ### in compute_notebooks above, also without .ipynb + # in compute_notebooks above, also without .ipynb chapters: - file: atm/adf_quick_run diff --git a/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml b/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml index 880a4d0..bba9cb0 100644 --- a/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml +++ b/examples/nblibrary/atm/config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml @@ -62,7 +62,7 @@ diag_basic_info: #Is this a model vs observations comparison? #If "false" or missing, then a model-model comparison is assumed: compare_obs: false - + hist_str: cam.h0 #Generate HTML website (assumed false if missing): @@ -111,9 +111,6 @@ diag_basic_info: num_procs: 8 redo_plot: false - - - @@ -139,7 +136,7 @@ diag_cam_climo: yrs: ${diag_cam_climo.start_year}-${diag_cam_climo.end_year} - #Location of CAM climatologies: + #Location of CAM climatologies: cam_climo_loc: ${climo_loc}${diag_cam_climo.cam_case_name}/atm/proc/${diag_cam_climo.yrs}/ #model year when time series files should start: @@ -203,7 +200,7 @@ diag_cam_baseline_climo: cam_hist_loc: /glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing/${diag_cam_baseline_climo.cam_case_name}/atm/hist/ yrs: ${diag_cam_baseline_climo.start_year}-${diag_cam_baseline_climo.end_year} - + #Location of baseline CAM climatologies: cam_climo_loc: /glade/campaign/cesm/development/cross-wg/diagnostic_framework/ADF-data/climos/${diag_cam_baseline_climo.cam_case_name}/${diag_cam_baseline_climo.yrs}/ @@ -245,7 +242,7 @@ diag_cam_baseline_climo: overwrite_tem: false - case_nickname: ${diag_cam_baseline_climo.cam_case_name} + case_nickname: ${diag_cam_baseline_climo.cam_case_name} #This fourth set of variables provides settings for calling the Climate Variability @@ -365,10 +362,10 @@ diag_var_list: - ICEFRAC - OCNFRAC - LANDFRAC - + derived_var_list: - - RESTOM - + - RESTOM + # diff --git a/examples/nblibrary/ice/cice_vars.yml b/examples/nblibrary/ice/cice_vars.yml index 00e6d61..5c5de2d 100644 --- a/examples/nblibrary/ice/cice_vars.yml +++ b/examples/nblibrary/ice/cice_vars.yml @@ -3,22 +3,22 @@ aice: - levels: [0.05,0.10,0.15,0.20,0.30,0.40,0.50,0.60,0.70,0.80,0.85,0.90,0.95,0.99] - title: "Sea Ice Concentration" -hi: +hi: - levels: [0.05,0.1,0.25,0.5,0.75,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,5.0] - title: "Sea Ice Thickness (m)" hs: - levels: 
[0.01,0.03,0.05,0.07,0.10,0.13,0.15,0.20,0.25,0.30,0.35,0.40,0.45,0.50] - title: "Snow Depth (m)" -Tsfc: +Tsfc: - levels: [-40.,-37.,-34.,-31.,-28.,-25.,-22.,-19.,-16.,-13.,-10.,-5.,-3.,-1.] - title: "Surface Temperature (C)" -albsni: +albsni: - levels: [5,10,15,20,30,40,50, 60, 65, 70, 75, 80,85, 90] - title: "Snow Ice Albedo" -flat: +flat: - levels: [-18.,-16.,-14.,-12.,-10.,-8.,-6.,-5.,-4.,-3.,-2.,-1.,0.,2.] - title: "Latent Heat Flux (W/m^2}" -fsens: +fsens: - levels: [-30.,-25.,-20.,-15.,-10.,-5.,-2.5,0,2.5,5,10,15,20,25] - title: "Sensible Heat Flux (W/m^2)" congel: diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index 4eed1dc..b9c6a0c 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -1,4 +1,3 @@ - import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt @@ -7,86 +6,83 @@ import cartopy.crs as ccrs import cartopy.feature as cfeature -def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): - # make circular boundary for polar stereographic circular plots - theta = np.linspace(0, 2*np.pi, 100) - center, radius = [0.5, 0.5], 0.5 - verts = np.vstack([np.sin(theta), np.cos(theta)]).T - circle = mpath.Path(verts * radius + center) - - - if (np.size(levels) > 2): - cmap = mpl.colormaps['tab20'] - norm = mpl.colors.BoundaryNorm(levels, ncolors=cmap.N) - - # set up the figure with a North Polar Stereographic projection - fig = plt.figure(tight_layout=True) - gs = GridSpec(2, 4) - - if (proj == "N"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - field_diff = field2.values-field1.values - field_std = field_diff.std() - - this=ax.pcolormesh(TLON, - TLAT, - field1, - norm = norm, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case1,fontsize=10) - - if (proj == "N"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - field2, - norm=norm, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2,fontsize=10) - - if (proj == "N"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - - ax.set_boundary(circle, transform=ax.transAxes) - 
ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - field_diff, - cmap="seismic",vmax=field_std*2.0,vmin=-field_std*2.0, - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2+"-"+case1,fontsize=10) - - plt.suptitle(title) +def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): + # make circular boundary for polar stereographic circular plots + theta = np.linspace(0, 2 * np.pi, 100) + center, radius = [0.5, 0.5], 0.5 + verts = np.vstack([np.sin(theta), np.cos(theta)]).T + circle = mpath.Path(verts * radius + center) + + if np.size(levels) > 2: + cmap = mpl.colormaps["tab20"] + norm = mpl.colors.BoundaryNorm(levels, ncolors=cmap.N) + + # set up the figure with a North Polar Stereographic projection + fig = plt.figure(tight_layout=True) + gs = GridSpec(2, 4) + + if proj == "N": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + field_diff = field2.values - field1.values + field_std = field_diff.std() + + this = ax.pcolormesh( + TLON, TLAT, field1, norm=norm, cmap="tab20", transform=ccrs.PlateCarree() + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case1, fontsize=10) + + if proj == "N": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, TLAT, field2, norm=norm, cmap="tab20", transform=ccrs.PlateCarree() + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2, fontsize=10) + + if proj == "N": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + + ax.set_boundary(circle, transform=ax.transAxes) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + field_diff, + cmap="seismic", + vmax=field_std * 2.0, + vmin=-field_std * 2.0, + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2 + "-" + case1, fontsize=10) + + plt.suptitle(title) diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 4bf9c8f..d373dcc 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -1,4 +1,3 @@ - import numpy as np import matplotlib as mpl import 
matplotlib.pyplot as plt @@ -7,21 +6,22 @@ import cartopy.crs as ccrs import cartopy.feature as cfeature -def vect_diff(uvel1,vvel1,uvel2,vvel2,angle,proj,case1,case2,TLAT,TLON): - uvel_rot1 = uvel1*np.cos(angle)-vvel1*np.sin(angle) - vvel_rot1 = uvel1*np.sin(angle)+vvel1*np.cos(angle) - uvel_rot2 = uvel2*np.cos(angle)-vvel2*np.sin(angle) - vvel_rot2 = uvel2*np.sin(angle)+vvel2*np.cos(angle) - - speed1 = np.sqrt(uvel1*uvel1+vvel1*vvel1) - speed2 = np.sqrt(uvel2*uvel2+vvel2*vvel2) - - uvel_diff = uvel_rot2-uvel_rot1 - vvel_diff = vvel_rot2-vvel_rot1 - speed_diff = speed2-speed1 - + +def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON): + uvel_rot1 = uvel1 * np.cos(angle) - vvel1 * np.sin(angle) + vvel_rot1 = uvel1 * np.sin(angle) + vvel1 * np.cos(angle) + uvel_rot2 = uvel2 * np.cos(angle) - vvel2 * np.sin(angle) + vvel_rot2 = uvel2 * np.sin(angle) + vvel2 * np.cos(angle) + + speed1 = np.sqrt(uvel1 * uvel1 + vvel1 * vvel1) + speed2 = np.sqrt(uvel2 * uvel2 + vvel2 * vvel2) + + uvel_diff = uvel_rot2 - uvel_rot1 + vvel_diff = vvel_rot2 - vvel_rot1 + speed_diff = speed2 - speed1 + # make circular boundary for polar stereographic circular plots - theta = np.linspace(0, 2*np.pi, 100) + theta = np.linspace(0, 2 * np.pi, 100) center, radius = [0.5, 0.5], 0.5 verts = np.vstack([np.sin(theta), np.cos(theta)]).T circle = mpath.Path(verts * radius + center) @@ -30,97 +30,148 @@ def vect_diff(uvel1,vvel1,uvel2,vvel2,angle,proj,case1,case2,TLAT,TLON): fig = plt.figure(tight_layout=True) gs = GridSpec(2, 4) - if (proj == "N"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,:2], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + if proj == "N": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, :2], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - speed1, - vmin = 0., - vmax = 0.5, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case1,fontsize=10) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + speed1, + vmin=0.0, + vmax=0.5, + cmap="tab20", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case1, fontsize=10) intv = 5 ## add vectors - Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values, - uvel_rot1[::intv,::intv].values,vvel_rot1[::intv,::intv].values, - color = 'black', scale=1., - transform=ccrs.PlateCarree()) + Q = ax.quiver( + TLON[::intv, ::intv].values, + TLAT[::intv, ::intv].values, + uvel_rot1[::intv, ::intv].values, + vvel_rot1[::intv, ::intv].values, + color="black", + scale=1.0, + transform=ccrs.PlateCarree(), + ) units = "cm/s" - qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2) 
- - if (proj == "N"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[0,2:], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - + qk = ax.quiverkey( + Q, + 0.85, + 0.025, + 0.10, + r"10 " + units, + labelpos="S", + coordinates="axes", + color="black", + zorder=2, + ) + + if proj == "N": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[0, 2:], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - speed2, - vmin = 0., - vmax = 0.5, - cmap="tab20", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case1,fontsize=10) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + this = ax.pcolormesh( + TLON, + TLAT, + speed2, + vmin=0.0, + vmax=0.5, + cmap="tab20", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case1, fontsize=10) intv = 5 ## add vectors - Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values, - uvel_rot2[::intv,::intv].values,vvel_rot2[::intv,::intv].values, - color = 'black', scale=1., - transform=ccrs.PlateCarree()) + Q = ax.quiver( + TLON[::intv, ::intv].values, + TLAT[::intv, ::intv].values, + uvel_rot2[::intv, ::intv].values, + vvel_rot2[::intv, ::intv].values, + color="black", + scale=1.0, + transform=ccrs.PlateCarree(), + ) units = "cm/s" - qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2) - - if (proj == "N"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.NorthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) - if (proj == "S"): - ax = fig.add_subplot(gs[1,1:3], projection=ccrs.SouthPolarStereo()) - # sets the latitude / longitude boundaries of the plot - ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) - + qk = ax.quiverkey( + Q, + 0.85, + 0.025, + 0.10, + r"10 " + units, + labelpos="S", + coordinates="axes", + color="black", + zorder=2, + ) + + if proj == "N": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.NorthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, 90, 45], crs=ccrs.PlateCarree()) + if proj == "S": + ax = fig.add_subplot(gs[1, 1:3], projection=ccrs.SouthPolarStereo()) + # sets the latitude / longitude boundaries of the plot + ax.set_extent([0.005, 360, -90, -45], crs=ccrs.PlateCarree()) + ax.set_boundary(circle, transform=ax.transAxes) - ax.add_feature(cfeature.LAND,zorder=100,edgecolor='k') - - this=ax.pcolormesh(TLON, - TLAT, - speed_diff, - vmin = -0.2, - vmax = 0.2, - cmap="seismic", - transform=ccrs.PlateCarree()) - plt.colorbar(this,orientation='vertical',fraction=0.04,pad=0.01) - plt.title(case2+"-"+case1,fontsize=10) + ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") + + 
this = ax.pcolormesh( + TLON, + TLAT, + speed_diff, + vmin=-0.2, + vmax=0.2, + cmap="seismic", + transform=ccrs.PlateCarree(), + ) + plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) + plt.title(case2 + "-" + case1, fontsize=10) intv = 5 ## add vectors - Q = ax.quiver(TLON[::intv,::intv].values,TLAT[::intv,::intv].values, - uvel_diff[::intv,::intv].values,vvel_diff[::intv,::intv].values, - color = 'black', scale=1., - transform=ccrs.PlateCarree()) + Q = ax.quiver( + TLON[::intv, ::intv].values, + TLAT[::intv, ::intv].values, + uvel_diff[::intv, ::intv].values, + vvel_diff[::intv, ::intv].values, + color="black", + scale=1.0, + transform=ccrs.PlateCarree(), + ) units = "cm/s" - qk = ax.quiverkey(Q,0.85,0.025,0.10,r'10 '+units,labelpos='S', coordinates='axes',color='black',zorder=2) + qk = ax.quiverkey( + Q, + 0.85, + 0.025, + 0.10, + r"10 " + units, + labelpos="S", + coordinates="axes", + color="black", + zorder=2, + ) plt.suptitle("Velocity m/s") From 630031e2eeb06b7ba55848fb10b1740ed2fa6f41 Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:12:42 -0600 Subject: [PATCH 04/23] one last trailing whitespace --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7718e02..d60a4ce 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Furthermore, to clear the `computed_notebooks` folder which was generated by the $ cupid-clear ``` -This will clear the `computed_notebooks` folder which is at the location pointed to by the `run_dir` variable in the `config.yml` file. +This will clear the `computed_notebooks` folder which is at the location pointed to by the `run_dir` variable in the `config.yml` file. ### CUPiD Options From 2a1215ac7cc71013a90b46e40930ba2f3cb3ecba Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:15:33 -0600 Subject: [PATCH 05/23] end of file fix --- docs/NCARtips.rst | 3 ++- docs/index.rst | 1 - environments/docs.yml | 3 ++- examples/coupled_model/config.yml | 6 ------ 4 files changed, 4 insertions(+), 9 deletions(-) diff --git a/docs/NCARtips.rst b/docs/NCARtips.rst index cef2ff0..6232c7b 100644 --- a/docs/NCARtips.rst +++ b/docs/NCARtips.rst @@ -1,2 +1,3 @@ .. include:: NCAR_tips.md - :parser: myst \ No newline at end of file + :parser: myst + diff --git a/docs/index.rst b/docs/index.rst index 8a1fef1..0f8629b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,4 +10,3 @@ CUPiD Documentation .. 
include:: README.md :parser: myst - diff --git a/environments/docs.yml b/environments/docs.yml index ff4c40b..9c04200 100644 --- a/environments/docs.yml +++ b/environments/docs.yml @@ -9,4 +9,5 @@ dependencies: - sphinx-book-theme - myst-nb - sphinx-design - - nbsphinx \ No newline at end of file + - nbsphinx + diff --git a/examples/coupled_model/config.yml b/examples/coupled_model/config.yml index b6ec15f..cc24904 100644 --- a/examples/coupled_model/config.yml +++ b/examples/coupled_model/config.yml @@ -201,9 +201,3 @@ book_config_keys: # Other keys can be added here, see https://jupyterbook.org/en/stable/customize/config.html ### for many more options - - - - - - From ec8003c6a0ade760337814763371a09e705ddd83 Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:21:01 -0600 Subject: [PATCH 06/23] end of files --- docs/NCARtips.rst | 1 - environments/docs.yml | 1 - examples/coupled_model/config.yml | 1 - 3 files changed, 3 deletions(-) diff --git a/docs/NCARtips.rst b/docs/NCARtips.rst index 6232c7b..8020125 100644 --- a/docs/NCARtips.rst +++ b/docs/NCARtips.rst @@ -1,3 +1,2 @@ .. include:: NCAR_tips.md :parser: myst - diff --git a/environments/docs.yml b/environments/docs.yml index 9c04200..381c085 100644 --- a/environments/docs.yml +++ b/environments/docs.yml @@ -10,4 +10,3 @@ dependencies: - myst-nb - sphinx-design - nbsphinx - diff --git a/examples/coupled_model/config.yml b/examples/coupled_model/config.yml index cc24904..4fdabc0 100644 --- a/examples/coupled_model/config.yml +++ b/examples/coupled_model/config.yml @@ -200,4 +200,3 @@ book_config_keys: # Other keys can be added here, see https://jupyterbook.org/en/stable/customize/config.html ### for many more options - From 94e3d5414601b3bbf2a31e7f87bd2ed860fa7585 Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:30:08 -0600 Subject: [PATCH 07/23] strings --- cupid/build.py | 4 ++-- cupid/clear.py | 18 +++++++++--------- cupid/read.py | 2 +- cupid/timeseries.py | 2 +- docs/conf.py | 6 +++--- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index d5778b8..cc60257 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -47,9 +47,9 @@ def build(config_path): # Originally used this code to copy jupyter book HTML to a location to host it online - # if 'publish_location' in control: + # if "publish_location" in control: - # user = os.environ.get('USER') + # user = os.environ.get("USER") # remote_mach = control["publish_location"]["remote_mach"] # remote_dir = control["publish_location"]["remote_dir"] # this seems more complicated than expected...people have mentioned paramiko library? diff --git a/cupid/clear.py b/cupid/clear.py index ea886fd..8ec1f86 100755 --- a/cupid/clear.py +++ b/cupid/clear.py @@ -1,10 +1,10 @@ #!/usr/bin/env python """ -This script provides functionality to clear the contents of the 'computed_notebooks' folder -at the location specified by the 'run_dir' variable in the CONFIG_PATH. +This script provides functionality to clear the contents of the "computed_notebooks" folder +at the location specified by the "run_dir" variable in the CONFIG_PATH. The main function `clear()` takes the path to the configuration file as input, reads the config file -to obtain the 'run_dir' variable, and then deletes the contents of the 'computed_notebooks' folder +to obtain the "run_dir" variable, and then deletes the contents of the "computed_notebooks" folder at that location. 
""" @@ -18,7 +18,7 @@ def read_config_file(config_path): """ Given the file path to the configuration file, this function reads the config file content and - returns the val of the run_dir string with '/computed_notebooks' appended to it + returns the val of the run_dir string with `/computed_notebooks` appended to it Args: CONFIG_PATH: str, path to configuration file (default config.yml) @@ -31,11 +31,11 @@ def read_config_file(config_path): run_dir = control["data_sources"].get("run_dir", None) if run_dir: - # Append '/computed_notebooks' to the run_dir value if it is not empty + # Append `/computed_notebooks` to the run_dir value if it is not empty full_path = os.path.join(run_dir, "computed_notebooks") return full_path - # else run_dir is empty/wasn't found in config file so return error + # else run_dir is empty/was not found in config file so return error raise ValueError("'run_dir' was empty/not found in the config file.") @@ -43,14 +43,14 @@ def read_config_file(config_path): @click.argument("config_path", default="config.yml") # Entry point to this script def clear(config_path): - """Clears the contents of the 'computed_notebooks' folder at the location - specified by the 'run_dir' variable in the CONFIG_PATH. + """Clears the contents of the "computed_notebooks" folder at the location + specified by the "run_dir" variable in the CONFIG_PATH. Args: CONFIG_PATH - The path to the configuration file. """ run_dir = read_config_file(config_path) - # Delete the 'computed_notebooks' folder and all the contents inside of it + # Delete the "computed_notebooks" folder and all the contents inside of it shutil.rmtree(run_dir) print(f"All contents in {run_dir} have been cleared.") diff --git a/cupid/read.py b/cupid/read.py index 03ec029..617bd65 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -22,7 +22,7 @@ def get_collection(path_to_catalog, **kwargs): """Get collection of datasets from intake catalog""" cat = intake.open_esm_datastore(path_to_catalog) ### note that the json file points to the csv, so the path that the - ### yaml file contains doesn't actually get used. this can cause issues + ### yaml file contains does not actually get used. this can cause issues cat_subset = cat.search(**kwargs) diff --git a/cupid/timeseries.py b/cupid/timeseries.py index 6315c16..a8793bf 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -221,7 +221,7 @@ def create_time_series( # create copy of var list that can be modified for derivable variables if diag_var_list == ["process_all"]: print("generating time series for all variables") - # TODO: this doesn't seem to be working for ocn... + # TODO: this does not seem to be working for ocn... diag_var_list = hist_file_var_list for var in diag_var_list: if var not in hist_file_var_list: diff --git a/docs/conf.py b/docs/conf.py index 39514e0..7afea9b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom # ones. extensions = [ @@ -85,7 +85,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] +# source_suffix = [".rst", ".md"] source_suffix = { ".rst": "restructuredtext", ".ipynb": "myst-nb", @@ -106,7 +106,7 @@ # further. 
For a list of options available for each theme, see the # documentation. html_theme_options = dict( - # analytics_id='' this is configured in rtfd.io + # analytics_id="" this is configured in rtfd.io # canonical_url="", repository_url="https://github.com/NCAR/CUPiD", repository_branch="main", From 77d600e9bcc6a71d39b4dd2703d69ce6a4fa26d1 Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:41:38 -0600 Subject: [PATCH 08/23] reorder imports --- cupid/build.py | 4 ++-- cupid/clear.py | 2 ++ cupid/read.py | 3 ++- cupid/run.py | 1 + cupid/timeseries.py | 4 ++-- cupid/util.py | 3 ++- docs/conf.py | 5 +++-- examples/nblibrary/ice/plot_diff.py | 3 ++- examples/nblibrary/ice/vect_diff.py | 5 +++-- 9 files changed, 19 insertions(+), 11 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index cc60257..336f081 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -15,11 +15,11 @@ None """ -import click import subprocess import sys -import yaml +import click +import yaml @click.command() @click.argument("config_path", default="config.yml") diff --git a/cupid/clear.py b/cupid/clear.py index 8ec1f86..fe02ece 100755 --- a/cupid/clear.py +++ b/cupid/clear.py @@ -11,7 +11,9 @@ import os import shutil + import click + import cupid.util diff --git a/cupid/read.py b/cupid/read.py index 617bd65..0249f39 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -7,9 +7,10 @@ intake catalog based on specified criteria. """ -import intake import yaml +import intake + def read_yaml(path_to_yaml): """Read yaml file and return data from loaded yaml file""" diff --git a/cupid/run.py b/cupid/run.py index 536ccc5..4902ba0 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -24,6 +24,7 @@ import os import warnings + import click import intake import ploomber diff --git a/cupid/timeseries.py b/cupid/timeseries.py index a8793bf..1a61890 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -7,12 +7,12 @@ # ++++++++++++++++++++++++++++++ import glob -import multiprocessing as mp import os import subprocess from pathlib import Path -import xarray as xr +import multiprocessing as mp +import xarray as xr def call_ncrcat(cmd): """This is an internal function to `create_time_series` diff --git a/cupid/util.py b/cupid/util.py index c8d8991..6cbbfdc 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -16,8 +16,9 @@ import os import sys -from pathlib import Path import warnings +from pathlib import Path + import jupyter_client import papermill as pm import ploomber diff --git a/docs/conf.py b/docs/conf.py index 7afea9b..9a7c286 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,10 +9,11 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-import os -import sys + import datetime +import os import re +import sys sys.path.insert(0, os.path.abspath("../..")) diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index b9c6a0c..60ebc6e 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -1,8 +1,9 @@ -import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt import matplotlib.path as mpath from matplotlib.gridspec import GridSpec +import numpy as np + import cartopy.crs as ccrs import cartopy.feature as cfeature diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index d373dcc..9a917c6 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -1,12 +1,13 @@ import numpy as np + import matplotlib as mpl -import matplotlib.pyplot as plt import matplotlib.path as mpath +import matplotlib.pyplot as plt from matplotlib.gridspec import GridSpec + import cartopy.crs as ccrs import cartopy.feature as cfeature - def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON): uvel_rot1 = uvel1 * np.cos(angle) - vvel1 * np.sin(angle) vvel_rot1 = uvel1 * np.sin(angle) + vvel1 * np.cos(angle) From e4a62069abdd5825d685a7f195eff04aa1db25ec Mon Sep 17 00:00:00 2001 From: Teagan Date: Wed, 15 May 2024 16:49:41 -0600 Subject: [PATCH 09/23] adjust import order --- cupid/build.py | 1 - cupid/read.py | 3 +-- cupid/run.py | 3 +-- cupid/timeseries.py | 2 +- cupid/util.py | 2 +- examples/nblibrary/ice/plot_diff.py | 5 ++--- examples/nblibrary/ice/vect_diff.py | 8 +++----- 7 files changed, 9 insertions(+), 15 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index 336f081..68fdb67 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -17,7 +17,6 @@ import subprocess import sys - import click import yaml diff --git a/cupid/read.py b/cupid/read.py index 0249f39..617bd65 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -7,9 +7,8 @@ intake catalog based on specified criteria. 
""" -import yaml - import intake +import yaml def read_yaml(path_to_yaml): diff --git a/cupid/run.py b/cupid/run.py index 4902ba0..302dae8 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -24,12 +24,11 @@ import os import warnings - import click import intake import ploomber -import cupid.util import cupid.timeseries +import cupid.util CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) diff --git a/cupid/timeseries.py b/cupid/timeseries.py index 1a61890..67f120a 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -7,10 +7,10 @@ # ++++++++++++++++++++++++++++++ import glob +import multiprocessing as mp import os import subprocess from pathlib import Path -import multiprocessing as mp import xarray as xr diff --git a/cupid/util.py b/cupid/util.py index 6cbbfdc..1cf2639 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -22,9 +22,9 @@ import jupyter_client import papermill as pm import ploomber +import yaml from papermill.engines import NBClientEngine from jinja2 import Template -import yaml class MarkdownJinjaEngine(NBClientEngine): diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index 60ebc6e..d12c40a 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -1,12 +1,11 @@ +import cartopy.crs as ccrs +import cartopy.feature as cfeature import matplotlib as mpl import matplotlib.pyplot as plt import matplotlib.path as mpath from matplotlib.gridspec import GridSpec import numpy as np -import cartopy.crs as ccrs -import cartopy.feature as cfeature - def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): # make circular boundary for polar stereographic circular plots diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 9a917c6..d6c7331 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -1,13 +1,11 @@ -import numpy as np - +import cartopy.crs as ccrs +import cartopy.feature as cfeature import matplotlib as mpl import matplotlib.path as mpath import matplotlib.pyplot as plt +import numpy as np from matplotlib.gridspec import GridSpec -import cartopy.crs as ccrs -import cartopy.feature as cfeature - def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON): uvel_rot1 = uvel1 * np.cos(angle) - vvel1 * np.sin(angle) vvel_rot1 = uvel1 * np.sin(angle) + vvel1 * np.cos(angle) From f6be20b42e678ec27ace9eba4ed6a41579b42a18 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:13:30 -0600 Subject: [PATCH 10/23] some more formatting --- .pre-commit-config.yaml | 1 - cupid/read.py | 12 ++++++------ cupid/run.py | 3 ++- cupid/timeseries.py | 19 ++++++++++--------- cupid/util.py | 15 +++++++-------- docs/conf.py | 2 +- 6 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79319ff..870b1d9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,6 @@ repos: - id: end-of-file-fixer - id: check-yaml - id: debug-statements - - id: double-quote-string-fixer - id: check-docstring-first - id: check-json diff --git a/cupid/read.py b/cupid/read.py index 617bd65..775226a 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -21,21 +21,21 @@ def read_yaml(path_to_yaml): def get_collection(path_to_catalog, **kwargs): """Get collection of datasets from intake catalog""" cat = intake.open_esm_datastore(path_to_catalog) - ### note that the json file points to the csv, so the path that the - ### yaml file contains 
does not actually get used. this can cause issues + # note that the json file points to the csv, so the path that the + # yaml file contains does not actually get used. this can cause issues cat_subset = cat.search(**kwargs) if "variable" in kwargs.keys(): # pylint: disable=invalid-name def preprocess(ds): - ## the double brackets return a Dataset rather than a DataArray - ## this is fragile and could cause issues, not sure what subsetting on time_bound does + # the double brackets return a Dataset rather than a DataArray + # this is fragile and could cause issues, not sure what subsetting on time_bound does return ds[[kwargs["variable"], "time_bound"]] - ## not sure what the chunking kwarg is doing here either + # not sure what the chunking kwarg is doing here either dsets = cat_subset.to_dataset_dict( - xarray_open_kwargs={"chunks": {"time": -1}}, preprocess=preprocess + xarray_open_kwargs={"chunks": {"time": -1}}, preprocess=preprocess, ) else: diff --git a/cupid/run.py b/cupid/run.py index 302dae8..fbdb353 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -1,5 +1,4 @@ #!/usr/bin/env python - """ Main script for running all notebooks and scripts specified in the configuration file. @@ -24,9 +23,11 @@ import os import warnings + import click import intake import ploomber + import cupid.timeseries import cupid.util diff --git a/cupid/timeseries.py b/cupid/timeseries.py index 67f120a..8f8443d 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -14,6 +14,7 @@ import xarray as xr + def call_ncrcat(cmd): """This is an internal function to `create_time_series` It just wraps the subprocess.call() function, so it can be @@ -124,7 +125,7 @@ def create_time_series( for year in range(start_year, end_year + 1): # Add files to main file list: for fname in starting_location.glob( - f"*{hist_str}.*{str(year).zfill(4)}*.nc" + f"*{hist_str}.*{str(year).zfill(4)}*.nc", ): files_list.append(fname) # End for @@ -135,7 +136,7 @@ def create_time_series( # Open an xarray dataset from the first model history file: hist_file_ds = xr.open_dataset( - hist_files[0], decode_cf=False, decode_times=False + hist_files[0], decode_cf=False, decode_times=False, ) # Get a list of data variables in the 1st hist file: @@ -227,7 +228,7 @@ def create_time_series( if var not in hist_file_var_list: if component == "ocn": print( - "ocean vars seem to not be present in all files and thus cause errors" + "ocean vars seem to not be present in all files and thus cause errors", ) continue if ( @@ -325,7 +326,7 @@ def create_time_series( if vars_to_derive: if component == "atm": derive_cam_variables( - vars_to_derive=vars_to_derive, ts_dir=ts_dir[case_idx] + vars_to_derive=vars_to_derive, ts_dir=ts_dir[case_idx], ) if serial: @@ -357,7 +358,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): # PRECT can be found by simply adding PRECL and PRECC # grab file names for the PRECL and PRECC files from the case ts directory if glob.glob(os.path.join(ts_dir, "*PRECC*")) and glob.glob( - os.path.join(ts_dir, "*PRECL*") + os.path.join(ts_dir, "*PRECL*"), ): constit_files = sorted(glob.glob(os.path.join(ts_dir, "*PREC*"))) else: @@ -374,7 +375,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): else: print( f"[{__name__}] Warning: PRECT file was found and overwrite is False" - + "Will use existing file." 
+ + "Will use existing file.", ) continue # append PRECC to the file containing PRECL @@ -385,7 +386,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): # RESTOM = FSNT-FLNT # Have to be more precise than with PRECT because FSNTOA, FSTNC, etc are valid variables if glob.glob(os.path.join(ts_dir, "*.FSNT.*")) and glob.glob( - os.path.join(ts_dir, "*.FLNT.*") + os.path.join(ts_dir, "*.FLNT.*"), ): input_files = [ sorted(glob.glob(os.path.join(ts_dir, f"*.{v}.*"))) @@ -408,12 +409,12 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None): else: print( f"[{__name__}] Warning: RESTOM file was found and overwrite is False." - + "Will use existing file." + + "Will use existing file.", ) continue # append FSNT to the file containing FLNT os.system(f"ncks -A -v FLNT {constit_files[0]} {constit_files[1]}") # create new file with the difference of FLNT and FSNT os.system( - f"ncap2 -s 'RESTOM=(FSNT-FLNT)' {constit_files[1]} {derived_file}" + f"ncap2 -s 'RESTOM=(FSNT-FLNT)' {constit_files[1]} {derived_file}", ) diff --git a/cupid/util.py b/cupid/util.py index 1cf2639..22a1e88 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -23,9 +23,8 @@ import papermill as pm import ploomber import yaml -from papermill.engines import NBClientEngine from jinja2 import Template - +from papermill.engines import NBClientEngine class MarkdownJinjaEngine(NBClientEngine): """Class for using the Jinja Engine to run notebooks""" @@ -133,7 +132,7 @@ def setup_book(config_path): def create_ploomber_nb_task( - nb, info, cat_path, nb_path_root, output_dir, global_params, dag, dependency=None + nb, info, cat_path, nb_path_root, output_dir, global_params, dag, dependency=None, ): """ Creates a ploomber task for running a notebook, including necessary parameters. @@ -154,7 +153,7 @@ def create_ploomber_nb_task( parameter_groups = info["parameter_groups"] - ### passing in subset kwargs if they're provided + # passing in subset kwargs if they're provided if "subset" in info: subset_kwargs = info["subset"] else: @@ -170,7 +169,7 @@ def create_ploomber_nb_task( output_path = f"{output_dir}/{output_name}" - ### all of these things should be optional + # all of these things should be optional parms_in = dict(**default_params) parms_in.update(**global_params) parms_in.update(dict(**parms)) @@ -207,7 +206,7 @@ def create_ploomber_nb_task( def create_ploomber_script_task( - script, info, cat_path, nb_path_root, global_params, dag, dependency=None + script, info, cat_path, nb_path_root, global_params, dag, dependency=None, ): """ Creates a Ploomber task for running a script, including necessary parameters. 
@@ -230,7 +229,7 @@ def create_ploomber_script_task( parameter_groups = info["parameter_groups"] - ### passing in subset kwargs if they're provided + # passing in subset kwargs if they're provided if "subset" in info: subset_kwargs = info["subset"] else: @@ -246,7 +245,7 @@ def create_ploomber_script_task( # output_path = f"{output_dir}/{output_name}" - ### all of these things should be optional + # all of these things should be optional parms_in = dict(**default_params) parms_in.update(**global_params) parms_in.update(dict(**parms)) diff --git a/docs/conf.py b/docs/conf.py index 9a7c286..4df86b1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -40,7 +40,7 @@ project = "CUPiD" current_year = datetime.datetime.now().year -copyright = "{}, University Corporation for Atmospheric Research".format(current_year) +copyright = f"{current_year}, University Corporation for Atmospheric Research" author = "NSF NCAR" From 8989d74c961e3122e30ba16056dfe787a69dbc42 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:21:55 -0600 Subject: [PATCH 11/23] trailing commas --- cupid/build.py | 2 +- cupid/run.py | 14 +++++++------- cupid/util.py | 4 ++-- examples/nblibrary/ice/plot_diff.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index 68fdb67..5a09bdd 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -41,7 +41,7 @@ def build(config_path): subprocess.run(["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) subprocess.run( - ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"] + ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"], ) # Originally used this code to copy jupyter book HTML to a location to host it online diff --git a/cupid/run.py b/cupid/run.py index fbdb353..3dd3d9c 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -136,7 +136,7 @@ def run( output_dir = run_dir + "/computed_notebooks/" + control["data_sources"]["sname"] temp_data_path = run_dir + "/temp_data" nb_path_root = os.path.realpath( - os.path.expanduser(control["data_sources"]["nb_path_root"]) + os.path.expanduser(control["data_sources"]["nb_path_root"]), ) ##################################################################### @@ -148,7 +148,7 @@ def run( if "path_to_cat_json" in control["data_sources"]: full_cat_path = os.path.realpath( - os.path.expanduser(control["data_sources"]["path_to_cat_json"]) + os.path.expanduser(control["data_sources"]["path_to_cat_json"]), ) full_cat = intake.open_esm_datastore(full_cat_path) @@ -160,7 +160,7 @@ def run( # This pulls out the name of the catalog from the path cat_subset_name = full_cat_path.split("/")[-1].split(".")[0] + "_subset" cat_subset.serialize( - directory=temp_data_path, name=cat_subset_name, catalog_type="file" + directory=temp_data_path, name=cat_subset_name, catalog_type="file", ) cat_path = temp_data_path + "/" + cat_subset_name + ".json" else: @@ -192,7 +192,7 @@ def run( all_nbs[nb]["output_dir"] = output_dir + "/" + comp_name elif comp_bool and not all: warnings.warn( - f"No notebooks for {comp_name} component specified in config file." + f"No notebooks for {comp_name} component specified in config file.", ) # Checking for existence of environments @@ -203,7 +203,7 @@ def run( warnings.warn( f"Environment {bad_env} specified for {nb}.ipynb could not be found;"+ f" {nb}.ipynb will not be run."+ - f"See README.md for environment installation instructions." 
+ f"See README.md for environment installation instructions.", ) all_nbs.pop(nb) @@ -235,7 +235,7 @@ def run( all_scripts[script]["nb_path_root"] = nb_path_root + "/" + comp_name elif comp_bool and not all: warnings.warn( - f"No scripts for {comp_name} component specified in config file." + f"No scripts for {comp_name} component specified in config file.", ) # Checking for existence of environments @@ -245,7 +245,7 @@ def run( bad_env = info["kernel_name"] warnings.warn( f"Environment {bad_env} specified for {script}.py could not be found;"+ - f"{script}.py will not be run." + f"{script}.py will not be run.", ) all_scripts.pop(script) diff --git a/cupid/util.py b/cupid/util.py index 22a1e88..e740506 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -63,7 +63,7 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No conda environment specified for {nb}.ipynb and no default kernel set, will use cupid-analysis environment." + f"No conda environment specified for {nb}.ipynb and no default kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = ( @@ -78,7 +78,7 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No environment specified for {script}.py and no default kernel set, will use cupid-analysis environment." + f"No environment specified for {script}.py and no default kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = ( diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index d12c40a..44e0128 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -38,7 +38,7 @@ def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): field_std = field_diff.std() this = ax.pcolormesh( - TLON, TLAT, field1, norm=norm, cmap="tab20", transform=ccrs.PlateCarree() + TLON, TLAT, field1, norm=norm, cmap="tab20", transform=ccrs.PlateCarree(), ) plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) plt.title(case1, fontsize=10) @@ -56,7 +56,7 @@ def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") this = ax.pcolormesh( - TLON, TLAT, field2, norm=norm, cmap="tab20", transform=ccrs.PlateCarree() + TLON, TLAT, field2, norm=norm, cmap="tab20", transform=ccrs.PlateCarree(), ) plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) plt.title(case2, fontsize=10) From 7c9936be40acbf10f8ccf00bdac34508e465276f Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:29:13 -0600 Subject: [PATCH 12/23] ran nbblack --- examples/nblibrary/atm/adf_quick_run.ipynb | 316 +++++++++------- examples/nblibrary/ice/seaice.ipynb | 360 +++++++++++-------- examples/nblibrary/lnd/land_comparison.ipynb | 107 +++--- examples/nblibrary/ocn/ocean_surface.ipynb | 74 ++-- 4 files changed, 474 insertions(+), 383 deletions(-) diff --git a/examples/nblibrary/atm/adf_quick_run.ipynb b/examples/nblibrary/atm/adf_quick_run.ipynb index 93c28d5..115369a 100644 --- a/examples/nblibrary/atm/adf_quick_run.ipynb +++ b/examples/nblibrary/atm/adf_quick_run.ipynb @@ -52,7 +52,7 @@ "### default parameters\n", "# adf_path = \"../../externals/ADF\"\n", "# config_path = \".\"\n", - "# config_fil_str = 
\"config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae_numcin3.001_vs_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml\"\n" + "# config_fil_str = \"config_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae_numcin3.001_vs_f.cam6_3_119.FLTHIST_ne30.r328_gamma0.33_soae.001.yaml\"" ] }, { @@ -86,14 +86,14 @@ "outputs": [], "source": [ "# Determine ADF directory path\n", - "# If it is in your cwd, set adf_path = local_path, \n", + "# If it is in your cwd, set adf_path = local_path,\n", "# otherwise set adf_path appropriately\n", "\n", - "local_path = os.path.abspath('')\n", + "local_path = os.path.abspath(\"\")\n", "\n", "# Set up the ADF for your location/user name\n", - "#user = \"richling\" \n", - "#adf_path = f\"/glade/work/{user}/ADF/\"\n", + "# user = \"richling\"\n", + "# adf_path = f\"/glade/work/{user}/ADF/\"\n", "\n", "print(f\"current working directory = {local_path}\")\n", "print(f\"ADF path = {adf_path}\")" @@ -106,11 +106,11 @@ "metadata": {}, "outputs": [], "source": [ - "#set path to ADF lib\n", - "lib_path = os.path.join(adf_path,\"lib\")\n", + "# set path to ADF lib\n", + "lib_path = os.path.join(adf_path, \"lib\")\n", "print(f\"The lib scripts live here, right? {lib_path}\")\n", "\n", - "#Add paths to python path:\n", + "# Add paths to python path:\n", "sys.path.append(lib_path)" ] }, @@ -121,11 +121,11 @@ "metadata": {}, "outputs": [], "source": [ - "#set path to ADF plotting scripts directory\n", - "plotting_scripts_path = os.path.join(adf_path,\"scripts\",\"plotting\")\n", + "# set path to ADF plotting scripts directory\n", + "plotting_scripts_path = os.path.join(adf_path, \"scripts\", \"plotting\")\n", "print(f\"The plotting scripts live here, right? {plotting_scripts_path}\")\n", "\n", - "#Add paths to python path:\n", + "# Add paths to python path:\n", "sys.path.append(plotting_scripts_path)" ] }, @@ -145,7 +145,7 @@ "metadata": {}, "outputs": [], "source": [ - "config_file=os.path.join(config_path,config_fil_str)\n" + "config_file = os.path.join(config_path, config_fil_str)" ] }, { @@ -155,13 +155,13 @@ "metadata": {}, "outputs": [], "source": [ - "#import ADF diagnostics object\n", + "# import ADF diagnostics object\n", "from adf_diag import AdfDiag\n", "\n", "# If this fails, check your paths output in the cells above,\n", "# and that you are running the NPL (conda) Kernel\n", "# You can see all the paths being examined by un-commenting the following:\n", - "#sys.path" + "# sys.path" ] }, { @@ -171,7 +171,7 @@ "metadata": {}, "outputs": [], "source": [ - "#Initialize ADF object\n", + "# Initialize ADF object\n", "adf = AdfDiag(config_file)\n", "adf" ] @@ -205,10 +205,10 @@ }, "outputs": [], "source": [ - "#Create model time series.\n", + "# Create model time series.\n", "adf.create_time_series()\n", "\n", - "#Create model baseline time series (if needed):\n", + "# Create model baseline time series (if needed):\n", "if not adf.compare_obs:\n", " adf.create_time_series(baseline=True)" ] @@ -232,7 +232,7 @@ }, "outputs": [], "source": [ - "#Create model climatology (climo) files.\n", + "# Create model climatology (climo) files.\n", "adf.create_climo()" ] }, @@ -253,9 +253,9 @@ }, "outputs": [], "source": [ - "#Regrid model climatology files to match either\n", - "#observations or CAM baseline climatologies.\n", - "#This call uses the \"regridding_scripts\" specified in the config file:\n", + "# Regrid model climatology files to match either\n", + "# observations or CAM baseline climatologies.\n", + "# This call uses the \"regridding_scripts\" specified in the config 
file:\n", "adf.regrid_climo()" ] }, @@ -396,7 +396,7 @@ "source": [ "basic_info_dict = adf.read_config_var(\"diag_basic_info\")\n", "\n", - "for key,val in basic_info_dict.items():\n", + "for key, val in basic_info_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -417,7 +417,7 @@ "source": [ "test_dict = adf.read_config_var(\"diag_cam_climo\")\n", "\n", - "for key,val in test_dict.items():\n", + "for key, val in test_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -438,7 +438,7 @@ "source": [ "baseline_dict = adf.read_config_var(\"diag_cam_baseline_climo\")\n", "\n", - "for key,val in baseline_dict.items():\n", + "for key, val in baseline_dict.items():\n", " print(f\"{key}: {val}\")" ] }, @@ -469,8 +469,8 @@ "metadata": {}, "outputs": [], "source": [ - "#List of case names (list by default)\n", - "case_names = adf.get_cam_info(\"cam_case_name\",required=True)\n", + "# List of case names (list by default)\n", + "case_names = adf.get_cam_info(\"cam_case_name\", required=True)\n", "print(case_names)\n", "\n", "base_name = adf.get_baseline_info(\"cam_case_name\")\n", @@ -494,9 +494,9 @@ "metadata": {}, "outputs": [], "source": [ - "case_climo_loc = adf.get_cam_info('cam_climo_loc', required=True)\n", + "case_climo_loc = adf.get_cam_info(\"cam_climo_loc\", required=True)\n", "base_climo_loc = adf.get_baseline_info(\"cam_climo_loc\")\n", - "case_climo_loc,base_climo_loc" + "case_climo_loc, base_climo_loc" ] }, { @@ -573,14 +573,14 @@ "metadata": {}, "outputs": [], "source": [ - "case_names = adf.get_cam_info('cam_case_name', required=True)\n", + "case_names = adf.get_cam_info(\"cam_case_name\", required=True)\n", "case_names_len = len(case_names)\n", - "data_name = adf.get_baseline_info('cam_case_name', required=False)\n", + "data_name = adf.get_baseline_info(\"cam_case_name\", required=False)\n", "\n", "case_ts_locs = adf.get_cam_info(\"cam_ts_loc\", required=True)\n", "data_ts_loc = adf.get_baseline_info(\"cam_ts_loc\", required=False)\n", "\n", - "res = adf.variable_defaults # dict of variable-specific plot preferences\n", + "res = adf.variable_defaults # dict of variable-specific plot preferences\n", "# or an empty dictionary if use_defaults was not specified in YAML.\n", "\n", "start_year = adf.climo_yrs[\"syears\"]\n", @@ -607,12 +607,14 @@ " print(\"Input file list is empty.\")\n", " return None\n", " elif len(fils) > 1:\n", - " return xr.open_mfdataset(fils, combine='by_coords')\n", + " return xr.open_mfdataset(fils, combine=\"by_coords\")\n", " else:\n", " sfil = str(fils[0])\n", " return xr.open_dataset(sfil)\n", - " #End if\n", - "#End def" + " # End if\n", + "\n", + "\n", + "# End def" ] }, { @@ -622,42 +624,53 @@ "metadata": {}, "outputs": [], "source": [ - "def _data_calcs(ts_loc,var,subset=None):\n", + "def _data_calcs(ts_loc, var, subset=None):\n", " \"\"\"\n", " args\n", " ----\n", " - ts_loc: Path\n", " path to time series file\n", - " \n", + "\n", " - var: str\n", " name of variable\n", - " \n", - " - subset (optional): dict \n", + "\n", + " - subset (optional): dict\n", " lat/lon extents (south, north, east, west)\n", " \"\"\"\n", " fils = sorted(list(Path(ts_loc).glob(f\"*{var}*.nc\")))\n", "\n", " ts_ds = _load_dataset(fils)\n", - " \n", - " time = ts_ds['time']\n", - " time = xr.DataArray(ts_ds['time_bnds'].load().mean(dim='nbnd').values, dims=time.dims, attrs=time.attrs)\n", - " ts_ds['time'] = time\n", + "\n", + " time = ts_ds[\"time\"]\n", + " time = xr.DataArray(\n", + " ts_ds[\"time_bnds\"].load().mean(dim=\"nbnd\").values,\n", + " dims=time.dims,\n", + " 
attrs=time.attrs,\n", + " )\n", + " ts_ds[\"time\"] = time\n", " ts_ds.assign_coords(time=time)\n", " ts_ds = xr.decode_cf(ts_ds)\n", - " \n", + "\n", " if subset != None:\n", - " ts_ds = ts_ds.sel(lat=slice(subset[\"s\"],subset[\"n\"]), lon=slice(subset[\"w\"],subset[\"e\"])) \n", - " \n", + " ts_ds = ts_ds.sel(\n", + " lat=slice(subset[\"s\"], subset[\"n\"]), lon=slice(subset[\"w\"], subset[\"e\"])\n", + " )\n", + "\n", " data = ts_ds[var].squeeze()\n", " unit = data.units\n", - " \n", + "\n", " # global weighting\n", " w = np.cos(np.radians(data.lat))\n", - " avg = data.weighted(w).mean(dim=(\"lat\",\"lon\"))\n", - " \n", - " yrs = np.unique([str(val.item().timetuple().tm_year).zfill(4) for _,val in enumerate(ts_ds[\"time\"])])\n", + " avg = data.weighted(w).mean(dim=(\"lat\", \"lon\"))\n", "\n", - " return avg,yrs,unit" + " yrs = np.unique(\n", + " [\n", + " str(val.item().timetuple().tm_year).zfill(4)\n", + " for _, val in enumerate(ts_ds[\"time\"])\n", + " ]\n", + " )\n", + "\n", + " return avg, yrs, unit" ] }, { @@ -667,7 +680,7 @@ "metadata": {}, "outputs": [], "source": [ - "def ts_plot(ax, name, vals, yrs, unit, color_dict,linewidth=None,zorder=1):\n", + "def ts_plot(ax, name, vals, yrs, unit, color_dict, linewidth=None, zorder=1):\n", " \"\"\"\n", " args\n", " ----\n", @@ -675,15 +688,23 @@ " color and marker style for variable\n", " \"\"\"\n", "\n", - " ax.plot(yrs, vals, color_dict[\"marker\"], c=color_dict[\"color\"],label=name,linewidth=linewidth,zorder=zorder)\n", + " ax.plot(\n", + " yrs,\n", + " vals,\n", + " color_dict[\"marker\"],\n", + " c=color_dict[\"color\"],\n", + " label=name,\n", + " linewidth=linewidth,\n", + " zorder=zorder,\n", + " )\n", "\n", - " ax.set_xlabel(\"Years\",fontsize=15,labelpad=20)\n", - " ax.set_ylabel(unit,fontsize=15,labelpad=20) \n", + " ax.set_xlabel(\"Years\", fontsize=15, labelpad=20)\n", + " ax.set_ylabel(unit, fontsize=15, labelpad=20)\n", "\n", " # For the minor ticks, use no labels; default NullFormatter.\n", - " ax.tick_params(which='major', length=7)\n", - " ax.tick_params(which='minor', length=5)\n", - " \n", + " ax.tick_params(which=\"major\", length=7)\n", + " ax.tick_params(which=\"minor\", length=5)\n", + "\n", " return ax" ] }, @@ -695,58 +716,69 @@ "outputs": [], "source": [ "def plot_var_details(ax, var, vals_cases, vals_base):\n", - " \n", " mins = []\n", " maxs = []\n", - " for i,val in enumerate(vals_cases):\n", - "\n", + " for i, val in enumerate(vals_cases):\n", " mins.append(np.nanmin(vals_cases[i]))\n", " maxs.append(np.nanmax(vals_cases[i]))\n", "\n", " mins.append(np.nanmin(vals_base))\n", " maxs.append(np.nanmax(vals_base))\n", "\n", - " if var == \"SST\": \n", - " ax.set_ylabel(\"K\",fontsize=20,labelpad=12)\n", + " if var == \"SST\":\n", + " ax.set_ylabel(\"K\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_major_locator(MultipleLocator(1))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", - " \n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", + "\n", " if var == \"TS\":\n", - " ax.set_ylabel(\"K\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"K\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_minor_locator(MultipleLocator(0.5))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", "\n", " if var == \"ICEFRAC\":\n", - " 
ax.set_ylabel(\"frac\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"frac\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.1\n", - " ax.set_ylim(np.floor(min(mins)),np.ceil(max(maxs)))\n", - " ax.set_title(f\"Time Series LabSea: {var} - ANN\",loc=\"left\",fontsize=22)\n", + " ax.set_ylim(np.floor(min(mins)), np.ceil(max(maxs)))\n", + " ax.set_title(f\"Time Series LabSea: {var} - ANN\", loc=\"left\", fontsize=22)\n", "\n", " if var == \"RESTOM\":\n", - " ax.set_ylabel(\"W/m2\",fontsize=20,labelpad=12)\n", + " ax.set_ylabel(\"W/m2\", fontsize=20, labelpad=12)\n", " tick_spacing = 0.5\n", " ax.yaxis.set_minor_locator(MultipleLocator(0.1))\n", - " ax.set_title(f\"Time Series Global: {var} - ANN\",loc=\"left\",fontsize=22)\n", - " \n", + " ax.set_title(f\"Time Series Global: {var} - ANN\", loc=\"left\", fontsize=22)\n", + "\n", " # Set label to show if RESTOM is 1 or 5-yr avg\n", - " line_1yr = Line2D([], [], label='1-yr avg', color='k', linewidth=1,marker='*',) \n", - " line_5yr = Line2D([], [], label='5-yr avg', color='k', linewidth=1,)\n", - " ax.legend(handles=[line_1yr,line_5yr], bbox_to_anchor=(0.99, 0.99))\n", - " \n", + " line_1yr = Line2D(\n", + " [],\n", + " [],\n", + " label=\"1-yr avg\",\n", + " color=\"k\",\n", + " linewidth=1,\n", + " marker=\"*\",\n", + " )\n", + " line_5yr = Line2D(\n", + " [],\n", + " [],\n", + " label=\"5-yr avg\",\n", + " color=\"k\",\n", + " linewidth=1,\n", + " )\n", + " ax.legend(handles=[line_1yr, line_5yr], bbox_to_anchor=(0.99, 0.99))\n", + "\n", " # Add extra space on the y-axis, except for ICEFRAC\n", " if var != \"ICEFRAC\":\n", - " ax.set_ylim(np.floor(min(mins)),np.ceil(max(maxs))+tick_spacing)\n", - " \n", + " ax.set_ylim(np.floor(min(mins)), np.ceil(max(maxs)) + tick_spacing)\n", + "\n", " ax.yaxis.set_major_locator(MultipleLocator(tick_spacing))\n", - " \n", - " ax.tick_params(axis='y', which='major', labelsize=16)\n", - " ax.tick_params(axis='y', which='minor', labelsize=16)\n", - " \n", - " ax.tick_params(axis='x', which='major', labelsize=14)\n", - " ax.tick_params(axis='x', which='minor', labelsize=14)\n", - " \n", + "\n", + " ax.tick_params(axis=\"y\", which=\"major\", labelsize=16)\n", + " ax.tick_params(axis=\"y\", which=\"minor\", labelsize=16)\n", + "\n", + " ax.tick_params(axis=\"x\", which=\"major\", labelsize=14)\n", + " ax.tick_params(axis=\"x\", which=\"minor\", labelsize=14)\n", + "\n", " return ax" ] }, @@ -773,7 +805,7 @@ "metadata": {}, "outputs": [], "source": [ - "ts_var_list = [\"RESTOM\",\"TS\",\"ICEFRAC\"]" + "ts_var_list = [\"RESTOM\", \"TS\", \"ICEFRAC\"]" ] }, { @@ -791,19 +823,19 @@ "from matplotlib.ticker import MultipleLocator\n", "from matplotlib.lines import Line2D\n", "\n", - "fig = plt.figure(figsize=(30,15))\n", + "fig = plt.figure(figsize=(30, 15))\n", "\n", "# Change the layout/number of subplots based off number of variables desired\n", "rows = 2\n", "cols = 3\n", - "gs = fig.add_gridspec(rows, cols, hspace=.3, wspace=.2)\n", + "gs = fig.add_gridspec(rows, cols, hspace=0.3, wspace=0.2)\n", "\n", "# Rough subset for Lab Sea\n", - "w = -63.5+360\n", - "e = -47.5+360\n", + "w = -63.5 + 360\n", + "e = -47.5 + 360\n", "s = 53.5\n", "n = 65.5\n", - "subset = {\"s\":s,\"n\":n,\"e\":e,\"w\":w}\n", + "subset = {\"s\": s, \"n\": n, \"e\": e, \"w\": w}\n", "\n", "# Add more colors as needed for number of test cases\n", "# ** Baseline is already added as green dashed line in plotting function **\n", @@ -811,13 +843,12 @@ "colors = [\"k\", \"aqua\", \"orange\", \"b\", \"magenta\", \"goldenrod\", \"slategrey\", 
\"rosybrown\"]\n", "\n", "# Setup plotting\n", - "#---------------\n", + "# ---------------\n", "\n", "# Loop over variables:\n", - "for i,var in enumerate(ts_var_list):\n", - " \n", - " print(\"Plotting variable:\",var)\n", - " \n", + "for i, var in enumerate(ts_var_list):\n", + " print(\"Plotting variable:\", var)\n", + "\n", " if var == \"RESTOM\":\n", " ax = plt.subplot(gs[0, 0])\n", " if var == \"TS\":\n", @@ -826,76 +857,79 @@ " ax = plt.subplot(gs[0, 2])\n", "\n", " # Grab baseline case:\n", - " #--------------------\n", + " # --------------------\n", "\n", - " if var == \"RESTOM\": \n", - " avg_base_FSNT,yrs_base,unit = _data_calcs(data_ts_loc,'FSNT')\n", - " avg_base_FLNT,_,_ = _data_calcs(data_ts_loc,\"FLNT\")\n", + " if var == \"RESTOM\":\n", + " avg_base_FSNT, yrs_base, unit = _data_calcs(data_ts_loc, \"FSNT\")\n", + " avg_base_FLNT, _, _ = _data_calcs(data_ts_loc, \"FLNT\")\n", " if len(yrs_base) < 5:\n", - " print(f\"Not a lot of climo years for {data_name}, only doing 1-yr avg for RESTOM...\")\n", + " print(\n", + " f\"Not a lot of climo years for {data_name}, only doing 1-yr avg for RESTOM...\"\n", + " )\n", " FSNT_base = avg_base_FSNT\n", " FLNT_base = avg_base_FLNT\n", " else:\n", - " FSNT_base = avg_base_FSNT.rolling(time=60,center=True).mean()\n", - " FLNT_base = avg_base_FLNT.rolling(time=60,center=True).mean()\n", + " FSNT_base = avg_base_FSNT.rolling(time=60, center=True).mean()\n", + " FLNT_base = avg_base_FLNT.rolling(time=60, center=True).mean()\n", "\n", " avg_base = FSNT_base - FLNT_base\n", - " \n", - " if (var == \"TS\" or var == \"SST\"):\n", - " avg_base,yrs_base,unit = _data_calcs(data_ts_loc,var)\n", - " \n", + "\n", + " if var == \"TS\" or var == \"SST\":\n", + " avg_base, yrs_base, unit = _data_calcs(data_ts_loc, var)\n", + "\n", " if var == \"ICEFRAC\":\n", - " avg_base,yrs_base,unit = _data_calcs(data_ts_loc,var,subset)\n", - " \n", + " avg_base, yrs_base, unit = _data_calcs(data_ts_loc, var, subset)\n", + "\n", " # Get int of years for plotting on x-axis\n", " yrs_base_int = yrs_base.astype(int)\n", "\n", " # Create yearly averages\n", " vals_base = [avg_base.sel(time=i).mean() for i in yrs_base]\n", - " \n", + "\n", " # Plot baseline data\n", - " color_dict = {\"color\":\"g\",\"marker\":\"--\"}\n", + " color_dict = {\"color\": \"g\", \"marker\": \"--\"}\n", " ax = ts_plot(ax, data_name, vals_base, yrs_base_int, unit, color_dict)\n", "\n", " # Loop over test cases:\n", - " #----------------------\n", + " # ----------------------\n", " # Create lists to hold all sets of years (for each case) and\n", " # sets of var data (for each case)\n", " vals_cases = []\n", " yrs_cases = []\n", " for case_idx, case_name in enumerate(case_names):\n", - "\n", " if var == \"RESTOM\":\n", - " avg_case_FSNT,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],'FSNT')\n", - " avg_case_FLNT,_,_ = _data_calcs(case_ts_locs[case_idx],\"FLNT\")\n", + " avg_case_FSNT, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], \"FSNT\")\n", + " avg_case_FLNT, _, _ = _data_calcs(case_ts_locs[case_idx], \"FLNT\")\n", " if len(yrs_case) < 5:\n", - " print(f\"Not a lot of climo years for {case_name}, only doing 1-yr avg for RESTOM...\")\n", + " print(\n", + " f\"Not a lot of climo years for {case_name}, only doing 1-yr avg for RESTOM...\"\n", + " )\n", " FSNT_case = avg_case_FSNT\n", " FLNT_case = avg_case_FLNT\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-*\"}\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-*\"}\n", " else:\n", - " FSNT_case = 
avg_case_FSNT.rolling(time=60,center=True).mean()\n", - " FLNT_case = avg_case_FLNT.rolling(time=60,center=True).mean()\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", + " FSNT_case = avg_case_FSNT.rolling(time=60, center=True).mean()\n", + " FLNT_case = avg_case_FLNT.rolling(time=60, center=True).mean()\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", "\n", " avg_case = FSNT_case - FLNT_case\n", "\n", " if var == \"TS\":\n", - " avg_case,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],var)\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", - " \n", + " avg_case, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], var)\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", + "\n", " if var == \"ICEFRAC\":\n", - " avg_case,yrs_case,unit = _data_calcs(case_ts_locs[case_idx],var,subset)\n", - " color_dict = {\"color\":colors[case_idx],\"marker\":\"-\"}\n", - " \n", + " avg_case, yrs_case, unit = _data_calcs(case_ts_locs[case_idx], var, subset)\n", + " color_dict = {\"color\": colors[case_idx], \"marker\": \"-\"}\n", + "\n", " # Get yearly averages for all available years\n", " vals_case = [avg_case.sel(time=i).mean() for i in yrs_case]\n", " vals_cases.append(vals_case)\n", - " \n", + "\n", " # Get int of years for plotting on x-axis\n", " yrs_case_int = yrs_case.astype(int)\n", " yrs_cases.append(yrs_case_int)\n", - " \n", + "\n", " # Add case to plot (ax)\n", " ax = ts_plot(ax, case_name, vals_case, yrs_case_int, unit, color_dict)\n", "\n", @@ -904,7 +938,7 @@ " # Get variable details\n", " ax = plot_var_details(ax, var, vals_cases, vals_base)\n", "\n", - " #Grab all unique years and find min/max years\n", + " # Grab all unique years and find min/max years\n", " uniq_yrs = sorted(x for v in yrs_cases for x in v)\n", " max_year = int(max(uniq_yrs))\n", " min_year = int(min(uniq_yrs))\n", @@ -918,22 +952,22 @@ " first_year = 0\n", "\n", " ax.set_xlim(first_year, last_year)\n", - " ax.set_xlabel(\"Years\",fontsize=15,labelpad=20)\n", + " ax.set_xlabel(\"Years\", fontsize=15, labelpad=20)\n", " # Set the x-axis plot limits\n", " # to guarantee data from all cases (including baseline) are on plot\n", - " ax.set_xlim(min_year, max_year+1)\n", + " ax.set_xlim(min_year, max_year + 1)\n", "\n", " # x-axis ticks and numbers\n", - " if max_year-min_year > 120:\n", + " if max_year - min_year > 120:\n", " ax.xaxis.set_major_locator(MultipleLocator(20))\n", " ax.xaxis.set_minor_locator(MultipleLocator(10))\n", - " if 10 <= max_year-min_year <= 120:\n", + " if 10 <= max_year - min_year <= 120:\n", " ax.xaxis.set_major_locator(MultipleLocator(5))\n", " ax.xaxis.set_minor_locator(MultipleLocator(1))\n", - " if 0 < max_year-min_year < 10:\n", + " if 0 < max_year - min_year < 10:\n", " ax.xaxis.set_major_locator(MultipleLocator(1))\n", " ax.xaxis.set_minor_locator(MultipleLocator(1))\n", - " \n", + "\n", " # End for (case loop)\n", "# End for (variables loop)\n", "\n", @@ -941,13 +975,17 @@ "# Gather labels based on case names and plotted line format (color, style, etc)\n", "lines_labels = [ax.get_legend_handles_labels() for ax in fig.axes]\n", "lines, labels = [sum(lol, []) for lol in zip(*lines_labels)]\n", - "fig.legend(lines[:case_names_len+1], labels[:case_names_len+1],\n", - " loc=\"center left\",fontsize=18,\n", - " bbox_to_anchor=(0.365, 0.4,.02,.05)) #bbox_to_anchor(x0, y0, width, height)\n", + "fig.legend(\n", + " lines[: case_names_len + 1],\n", + " labels[: case_names_len + 1],\n", + " loc=\"center 
left\",\n", + " fontsize=18,\n", + " bbox_to_anchor=(0.365, 0.4, 0.02, 0.05),\n", + ") # bbox_to_anchor(x0, y0, width, height)\n", "\n", "fig.show()\n", "\n", - "#plt.savefig(\"TimeSeries_ANN.png\", facecolor='w',bbox_inches=\"tight\")" + "# plt.savefig(\"TimeSeries_ANN.png\", facecolor='w',bbox_inches=\"tight\")" ] }, { diff --git a/examples/nblibrary/ice/seaice.ipynb b/examples/nblibrary/ice/seaice.ipynb index 658a127..077cfd5 100644 --- a/examples/nblibrary/ice/seaice.ipynb +++ b/examples/nblibrary/ice/seaice.ipynb @@ -45,8 +45,11 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "serial = False # use dask LocalCluster\n", - "cases = [\"g.e23_a16g.GJRAv4.TL319_t232_zstar_N65.2024.004\",\"g.e23_a16g.GJRAv4.TL319_t232_hycom1_N75.2024.005\"]\n", + "serial = False # use dask LocalCluster\n", + "cases = [\n", + " \"g.e23_a16g.GJRAv4.TL319_t232_zstar_N65.2024.004\",\n", + " \"g.e23_a16g.GJRAv4.TL319_t232_hycom1_N75.2024.005\",\n", + "]\n", "\n", "lc_kwargs = {}\n", "\n", @@ -65,10 +68,10 @@ "outputs": [], "source": [ "# Spin up cluster (if running in parallel)\n", - "client=None\n", + "client = None\n", "if not serial:\n", - " cluster = LocalCluster(**lc_kwargs)\n", - " client = Client(cluster)\n", + " cluster = LocalCluster(**lc_kwargs)\n", + " client = Client(cluster)\n", "\n", "client" ] @@ -90,42 +93,72 @@ "cbegyr2 = f\"{begyr2:04d}\"\n", "cendyr2 = f\"{endyr2:04d}\"\n", "\n", - "ds1 = xr.open_mfdataset(CESM_output_dir+\"/\"+case1+\"/ts/\"+case1+\".cice.h.\"+\"*.\"+cbegyr1+\"01-\"+cendyr1+\"12.nc\",\n", - " data_vars='minimal', compat='override', coords='minimal')\n", - "ds2 = xr.open_mfdataset(CESM_output_dir+\"/\"+case2+\"/ts/\"+case2+\".cice.h.\"+\"*.\"+cbegyr2+\"01-\"+cendyr2+\"12.nc\",\n", - " data_vars='minimal', compat='override', coords='minimal')\n", - "\n", - "TLAT = ds1['TLAT']\n", - "TLON = ds1['TLON']\n", - "tarea = ds1['tarea']\n", + "ds1 = xr.open_mfdataset(\n", + " CESM_output_dir\n", + " + \"/\"\n", + " + case1\n", + " + \"/ts/\"\n", + " + case1\n", + " + \".cice.h.\"\n", + " + \"*.\"\n", + " + cbegyr1\n", + " + \"01-\"\n", + " + cendyr1\n", + " + \"12.nc\",\n", + " data_vars=\"minimal\",\n", + " compat=\"override\",\n", + " coords=\"minimal\",\n", + ")\n", + "ds2 = xr.open_mfdataset(\n", + " CESM_output_dir\n", + " + \"/\"\n", + " + case2\n", + " + \"/ts/\"\n", + " + case2\n", + " + \".cice.h.\"\n", + " + \"*.\"\n", + " + cbegyr2\n", + " + \"01-\"\n", + " + cendyr2\n", + " + \"12.nc\",\n", + " data_vars=\"minimal\",\n", + " compat=\"override\",\n", + " coords=\"minimal\",\n", + ")\n", + "\n", + "TLAT = ds1[\"TLAT\"]\n", + "TLON = ds1[\"TLON\"]\n", + "tarea = ds1[\"tarea\"]\n", "\n", "# Make a DataArray with the number of days in each month, size = len(time)\n", "month_length = ds1.time.dt.days_in_month\n", - "weights_monthly = month_length.groupby(\"time.year\") / month_length.groupby(\"time.year\").sum()\n", + "weights_monthly = (\n", + " month_length.groupby(\"time.year\") / month_length.groupby(\"time.year\").sum()\n", + ")\n", "\n", "\n", - "#seasons = xr.full_like(months, fill_value=\"none\", dtype=\"U4\")\n", - "#seasons.name = \"season\"\n", - "#seasons[months.isin([1, 2, 3])] = \"JFM\"\n", - "#seasons[months.isin([4, 5, 6])] = \"AMJ\"\n", - "#seasons[months.isin([7, 8, 9])] = \"JAS\"\n", - "#seasons[months.isin([10, 11, 12])] = \"OND\"\n", - "#weights_season = month_length.groupby(seasons) / month_length.groupby(seasons).sum()\n", + "# seasons = xr.full_like(months, 
fill_value=\"none\", dtype=\"U4\")\n", + "# seasons.name = \"season\"\n", + "# seasons[months.isin([1, 2, 3])] = \"JFM\"\n", + "# seasons[months.isin([4, 5, 6])] = \"AMJ\"\n", + "# seasons[months.isin([7, 8, 9])] = \"JAS\"\n", + "# seasons[months.isin([10, 11, 12])] = \"OND\"\n", + "# weights_season = month_length.groupby(seasons) / month_length.groupby(seasons).sum()\n", "\n", "ds1_ann = (ds1 * weights_monthly).resample(time=\"YS\").sum(dim=\"time\")\n", "ds2_ann = (ds2 * weights_monthly).resample(time=\"YS\").sum(dim=\"time\")\n", "\n", "\n", - "#ds1_seas = (ds1 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", - "#ds2_seas = (ds2 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", + "# ds1_seas = (ds1 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", + "# ds2_seas = (ds2 * weights_season).resample(time=\"QS-JAN\").sum(dim=\"time\")\n", "\n", - "with open('cice_masks.yml', 'r') as file:\n", + "with open(\"cice_masks.yml\", \"r\") as file:\n", " cice_masks = yaml.safe_load(file)\n", "\n", - "with open('cice_vars.yml', 'r') as file:\n", + "with open(\"cice_vars.yml\", \"r\") as file:\n", " cice_vars = yaml.safe_load(file)\n", "\n", - "print(ds1['aice'])\n" + "print(ds1[\"aice\"])" ] }, { @@ -147,13 +180,13 @@ "outputs": [], "source": [ "for var in cice_vars:\n", - " vmin=cice_vars[var][0]['levels'][0]\n", - " vmax=cice_vars[var][0]['levels'][-1]\n", - " levels = np.array(cice_vars[var][0]['levels'])\n", - " title=cice_vars[var][1]['title']\n", - " field1 = ds1_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " field2 = ds2_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " plot_diff(field1,field2,levels,case1,case2,title,\"N\",TLAT,TLON)" + " vmin = cice_vars[var][0][\"levels\"][0]\n", + " vmax = cice_vars[var][0][\"levels\"][-1]\n", + " levels = np.array(cice_vars[var][0][\"levels\"])\n", + " title = cice_vars[var][1][\"title\"]\n", + " field1 = ds1_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " field2 = ds2_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " plot_diff(field1, field2, levels, case1, case2, title, \"N\", TLAT, TLON)" ] }, { @@ -164,13 +197,13 @@ "outputs": [], "source": [ "for var in cice_vars:\n", - " vmin=cice_vars[var][0]['levels'][0]\n", - " vmax=cice_vars[var][0]['levels'][1]\n", - " levels = np.array(cice_vars[var][0]['levels'])\n", - " title=cice_vars[var][1]['title']\n", - " field1 = ds1_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " field2 = ds2_ann[var].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - " plot_diff(field1,field2,levels,case1,case2,title,\"S\",TLAT,TLON)" + " vmin = cice_vars[var][0][\"levels\"][0]\n", + " vmax = cice_vars[var][0][\"levels\"][1]\n", + " levels = np.array(cice_vars[var][0][\"levels\"])\n", + " title = cice_vars[var][1][\"title\"]\n", + " field1 = ds1_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " field2 = ds2_ann[var].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + " plot_diff(field1, field2, levels, case1, case2, title, \"S\", TLAT, TLON)" ] }, { @@ -180,43 +213,43 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = (tarea*ds2.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT > 0).sum(dim=[\"nj\", 
\"ni\"]) * 1.0e-12\n", "\n", - "ds1_vhi = (tarea*ds1.hi).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhi = (tarea*ds2.hi).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhi = (tarea * ds1.hi).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhi = (tarea * ds2.hi).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "ds1_vhs = (tarea*ds1.hs).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhs = (tarea*ds2.hs).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhs = (tarea * ds1.hs).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhs = (tarea * ds2.hs).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "fig = plt.figure(figsize=(10,10),tight_layout=True)\n", + "fig = plt.figure(figsize=(10, 10), tight_layout=True)\n", "\n", - "ax = fig.add_subplot(3,1,1)\n", + "ax = fig.add_subplot(3, 1, 1)\n", "ds1_vhi.plot()\n", "ds2_vhi.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Sea Ice Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,2)\n", + "ax = fig.add_subplot(3, 1, 2)\n", "ds1_vhs.plot()\n", "ds2_vhs.plot()\n", "\n", - "plt.ylim((0,1))\n", + "plt.ylim((0, 1))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Snow Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,3)\n", + "ax = fig.add_subplot(3, 1, 3)\n", "ds1_area.plot()\n", "ds2_area.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"NH Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -226,43 +259,43 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area_ann = (tarea*ds1_ann['aice']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area_ann = (tarea*ds2_ann['aice']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area_ann = (tarea * ds1_ann[\"aice\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area_ann = (tarea * ds2_ann[\"aice\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", - "ds1_vhi_ann = (tarea*ds1_ann['hi']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhi_ann = (tarea*ds2_ann['hi']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhi_ann = (tarea * ds1_ann[\"hi\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhi_ann = (tarea * ds2_ann[\"hi\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "ds1_vhs_ann = (tarea*ds1_ann['hs']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", - "ds2_vhs_ann = (tarea*ds2_ann['hs']).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-13\n", + "ds1_vhs_ann = (tarea * ds1_ann[\"hs\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", + "ds2_vhs_ann = (tarea * ds2_ann[\"hs\"]).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-13\n", "\n", - "fig = plt.figure(figsize=(10,10),tight_layout=True)\n", + "fig = plt.figure(figsize=(10, 10), tight_layout=True)\n", "\n", - "ax = fig.add_subplot(3,1,1)\n", + "ax = fig.add_subplot(3, 1, 1)\n", "ds1_vhi_ann.plot()\n", "ds2_vhi_ann.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Sea Ice Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,2)\n", + "ax = fig.add_subplot(3, 1, 2)\n", "ds1_vhs_ann.plot()\n", "ds2_vhs_ann.plot()\n", 
"\n", - "plt.ylim((0,1))\n", + "plt.ylim((0, 1))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Snow Volume $m x 10^{13}$\")\n", - "plt.legend([case1,case2])\n", + "plt.legend([case1, case2])\n", "\n", - "ax = fig.add_subplot(3,1,3)\n", + "ax = fig.add_subplot(3, 1, 3)\n", "ds1_area_ann.plot()\n", "ds2_area_ann.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"NH Annual Mean Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -286,16 +319,16 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = (tarea*ds2.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_area.plot()\n", "ds2_area.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"SH Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -305,16 +338,16 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area_ann = (tarea*ds1_ann.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area_ann = (tarea*ds2_ann.aice).where(TLAT<0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area_ann = (tarea * ds1_ann.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area_ann = (tarea * ds2_ann.aice).where(TLAT < 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_area_ann.plot()\n", "ds2_area_ann.plot()\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"SH Annual Mean Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -328,51 +361,62 @@ "source": [ "### Read in the NSIDC data from files\n", "\n", - "path_nsidc = '/glade/campaign/cesm/development/pcwg/ice/data/NSIDC_SeaIce_extent/'\n", - "\n", - "jan_nsidc = pd.read_csv(path_nsidc+'N_01_extent_v3.0.csv',na_values=['-99.9'])\n", - "feb_nsidc = pd.read_csv(path_nsidc+'N_02_extent_v3.0.csv',na_values=['-99.9'])\n", - "mar_nsidc = pd.read_csv(path_nsidc+'N_03_extent_v3.0.csv',na_values=['-99.9'])\n", - "apr_nsidc = pd.read_csv(path_nsidc+'N_04_extent_v3.0.csv',na_values=['-99.9'])\n", - "may_nsidc = pd.read_csv(path_nsidc+'N_05_extent_v3.0.csv',na_values=['-99.9'])\n", - "jun_nsidc = pd.read_csv(path_nsidc+'N_06_extent_v3.0.csv',na_values=['-99.9'])\n", - "jul_nsidc = pd.read_csv(path_nsidc+'N_07_extent_v3.0.csv',na_values=['-99.9'])\n", - "aug_nsidc = pd.read_csv(path_nsidc+'N_08_extent_v3.0.csv',na_values=['-99.9'])\n", - "sep_nsidc = pd.read_csv(path_nsidc+'N_09_extent_v3.0.csv',na_values=['-99.9'])\n", - "oct_nsidc = pd.read_csv(path_nsidc+'N_10_extent_v3.0.csv',na_values=['-99.9'])\n", - "nov_nsidc = pd.read_csv(path_nsidc+'N_11_extent_v3.0.csv',na_values=['-99.9'])\n", - "dec_nsidc = pd.read_csv(path_nsidc+'N_12_extent_v3.0.csv',na_values=['-99.9'])\n", - "\n", - "jan_area = jan_nsidc.iloc[:,5].values\n", - "feb_area = feb_nsidc.iloc[:,5].values\n", - "mar_area = mar_nsidc.iloc[:,5].values\n", - "apr_area = apr_nsidc.iloc[:,5].values\n", - "may_area = may_nsidc.iloc[:,5].values\n", - "jun_area = jun_nsidc.iloc[:,5].values\n", - "jul_area = jul_nsidc.iloc[:,5].values\n", - "aug_area = aug_nsidc.iloc[:,5].values\n", - "sep_area = sep_nsidc.iloc[:,5].values\n", - "oct_area = 
oct_nsidc.iloc[:,5].values\n", - "nov_area = nov_nsidc.iloc[:,5].values\n", - "dec_area = dec_nsidc.iloc[:,5].values\n", - "\n", - "jan_ext = jan_nsidc.iloc[:,4].values\n", - "feb_ext = feb_nsidc.iloc[:,4].values\n", - "mar_ext = mar_nsidc.iloc[:,4].values\n", - "apr_ext = apr_nsidc.iloc[:,4].values\n", - "may_ext = may_nsidc.iloc[:,4].values\n", - "jun_ext = jun_nsidc.iloc[:,4].values\n", - "jul_ext = jul_nsidc.iloc[:,4].values\n", - "aug_ext = aug_nsidc.iloc[:,4].values\n", - "sep_ext = sep_nsidc.iloc[:,4].values\n", - "oct_ext = oct_nsidc.iloc[:,4].values\n", - "nov_ext = nov_nsidc.iloc[:,4].values\n", - "dec_ext = dec_nsidc.iloc[:,4].values\n", + "path_nsidc = \"/glade/campaign/cesm/development/pcwg/ice/data/NSIDC_SeaIce_extent/\"\n", + "\n", + "jan_nsidc = pd.read_csv(path_nsidc + \"N_01_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "feb_nsidc = pd.read_csv(path_nsidc + \"N_02_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "mar_nsidc = pd.read_csv(path_nsidc + \"N_03_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "apr_nsidc = pd.read_csv(path_nsidc + \"N_04_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "may_nsidc = pd.read_csv(path_nsidc + \"N_05_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "jun_nsidc = pd.read_csv(path_nsidc + \"N_06_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "jul_nsidc = pd.read_csv(path_nsidc + \"N_07_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "aug_nsidc = pd.read_csv(path_nsidc + \"N_08_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "sep_nsidc = pd.read_csv(path_nsidc + \"N_09_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "oct_nsidc = pd.read_csv(path_nsidc + \"N_10_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "nov_nsidc = pd.read_csv(path_nsidc + \"N_11_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "dec_nsidc = pd.read_csv(path_nsidc + \"N_12_extent_v3.0.csv\", na_values=[\"-99.9\"])\n", + "\n", + "jan_area = jan_nsidc.iloc[:, 5].values\n", + "feb_area = feb_nsidc.iloc[:, 5].values\n", + "mar_area = mar_nsidc.iloc[:, 5].values\n", + "apr_area = apr_nsidc.iloc[:, 5].values\n", + "may_area = may_nsidc.iloc[:, 5].values\n", + "jun_area = jun_nsidc.iloc[:, 5].values\n", + "jul_area = jul_nsidc.iloc[:, 5].values\n", + "aug_area = aug_nsidc.iloc[:, 5].values\n", + "sep_area = sep_nsidc.iloc[:, 5].values\n", + "oct_area = oct_nsidc.iloc[:, 5].values\n", + "nov_area = nov_nsidc.iloc[:, 5].values\n", + "dec_area = dec_nsidc.iloc[:, 5].values\n", + "\n", + "jan_ext = jan_nsidc.iloc[:, 4].values\n", + "feb_ext = feb_nsidc.iloc[:, 4].values\n", + "mar_ext = mar_nsidc.iloc[:, 4].values\n", + "apr_ext = apr_nsidc.iloc[:, 4].values\n", + "may_ext = may_nsidc.iloc[:, 4].values\n", + "jun_ext = jun_nsidc.iloc[:, 4].values\n", + "jul_ext = jul_nsidc.iloc[:, 4].values\n", + "aug_ext = aug_nsidc.iloc[:, 4].values\n", + "sep_ext = sep_nsidc.iloc[:, 4].values\n", + "oct_ext = oct_nsidc.iloc[:, 4].values\n", + "nov_ext = nov_nsidc.iloc[:, 4].values\n", + "dec_ext = dec_nsidc.iloc[:, 4].values\n", "\n", "print(dec_ext)\n", - "nsidc_clim = [np.nanmean(jan_ext[0:35]),np.nanmean(feb_ext[0:35]),np.nanmean(mar_ext[0:35]),np.nanmean(apr_ext[0:35]),\n", - " np.nanmean(may_ext[0:35]),np.nanmean(jun_ext[0:35]),np.nanmean(jul_ext[0:35]),np.nanmean(aug_ext[0:35]),\n", - " np.nanmean(sep_ext[0:35]),np.nanmean(oct_ext[0:35]),np.nanmean(nov_ext[0:35]),np.nanmean(dec_ext[0:35])]\n", + "nsidc_clim = [\n", + " np.nanmean(jan_ext[0:35]),\n", + " np.nanmean(feb_ext[0:35]),\n", + " np.nanmean(mar_ext[0:35]),\n", + " np.nanmean(apr_ext[0:35]),\n", + " 
np.nanmean(may_ext[0:35]),\n", + " np.nanmean(jun_ext[0:35]),\n", + " np.nanmean(jul_ext[0:35]),\n", + " np.nanmean(aug_ext[0:35]),\n", + " np.nanmean(sep_ext[0:35]),\n", + " np.nanmean(oct_ext[0:35]),\n", + " np.nanmean(nov_ext[0:35]),\n", + " np.nanmean(dec_ext[0:35]),\n", + "]\n", "\n", "plt.plot(nsidc_clim)" ] @@ -384,28 +428,27 @@ "metadata": {}, "outputs": [], "source": [ + "aice1_month = ds1[\"aice\"].groupby(\"time.month\").mean(dim=\"time\", skipna=True)\n", + "aice2_month = ds2[\"aice\"].groupby(\"time.month\").mean(dim=\"time\", skipna=True)\n", "\n", - "aice1_month = ds1['aice'].groupby(\"time.month\").mean(dim=\"time\",skipna=True)\n", - "aice2_month = ds2['aice'].groupby(\"time.month\").mean(dim=\"time\",skipna=True)\n", + "mask_tmp1 = np.where(np.logical_and(aice1_month > 0.15, ds1[\"TLAT\"] > 0), 1.0, 0.0)\n", + "mask_tmp2 = np.where(np.logical_and(aice2_month > 0.15, ds1[\"TLAT\"] > 0), 1.0, 0.0)\n", "\n", - "mask_tmp1 = np.where(np.logical_and(aice1_month > 0.15, ds1['TLAT'] > 0), 1., 0.)\n", - "mask_tmp2 = np.where(np.logical_and(aice2_month > 0.15, ds1['TLAT'] > 0), 1., 0.)\n", + "mask_ext1 = xr.DataArray(data=mask_tmp1, dims=[\"month\", \"nj\", \"ni\"])\n", + "mask_ext2 = xr.DataArray(data=mask_tmp2, dims=[\"month\", \"nj\", \"ni\"])\n", "\n", - "mask_ext1 = xr.DataArray(data=mask_tmp1,dims=[\"month\",\"nj\", \"ni\"])\n", - "mask_ext2 = xr.DataArray(data=mask_tmp2,dims=[\"month\",\"nj\", \"ni\"])\n", "\n", - "\n", - "ext1 = (mask_ext1*tarea).sum(['ni','nj'])*1.0e-12\n", - "ext2 = (mask_ext2*tarea).sum(['ni','nj'])*1.0e-12\n", + "ext1 = (mask_ext1 * tarea).sum([\"ni\", \"nj\"]) * 1.0e-12\n", + "ext2 = (mask_ext2 * tarea).sum([\"ni\", \"nj\"]) * 1.0e-12\n", "\n", "plt.plot(ext1)\n", "plt.plot(ext2)\n", "plt.plot(nsidc_clim)\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"Climatological Seasonal Cycle Ice Extent $m x 10^{12}$\")\n", - "plt.legend([case1,case2,\"NSIDC\"])\n" + "plt.legend([case1, case2, \"NSIDC\"])" ] }, { @@ -415,8 +458,8 @@ "metadata": {}, "outputs": [], "source": [ - "ds1_area = (tarea*ds1.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_area = (tarea*ds2.aice).where(TLAT>0).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_area = (tarea * ds1.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_area = (tarea * ds2.aice).where(TLAT > 0).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_sep = ds1_area.sel(time=(ds1_area.time.dt.month == 9))\n", "ds2_sep = ds2_area.sel(time=(ds2_area.time.dt.month == 9))\n", @@ -425,10 +468,10 @@ "plt.plot(ds2_sep)\n", "plt.plot(sep_area)\n", "\n", - "plt.ylim((0,25))\n", + "plt.ylim((0, 25))\n", "plt.xlabel(\"Year\")\n", "plt.ylabel(\"Sea Ice Area $mx10^{12}$\")\n", - "plt.legend([case1,case2,\"NSIDC\"])" + "plt.legend([case1, case2, \"NSIDC\"])" ] }, { @@ -438,26 +481,25 @@ "metadata": {}, "outputs": [], "source": [ + "latm = cice_masks[\"Lab_lat\"]\n", + "lonm = cice_masks[\"Lab_lon\"]\n", "\n", - "latm = cice_masks['Lab_lat']\n", - "lonm = cice_masks['Lab_lon']\n", - "\n", - "lon = np.where(TLON < 0, TLON+360.,TLON)\n", + "lon = np.where(TLON < 0, TLON + 360.0, TLON)\n", "\n", - "mask1 = np.where(np.logical_and(TLAT > latm[0], TLAT < latm[1]),1.,0.)\n", - "mask2 = np.where(np.logical_or(lon > lonm[0], lon < lonm[1]),1.,0.)\n", - "mask = mask1*mask2\n", + "mask1 = np.where(np.logical_and(TLAT > latm[0], TLAT < latm[1]), 1.0, 0.0)\n", + "mask2 = np.where(np.logical_or(lon > lonm[0], lon < lonm[1]), 1.0, 0.0)\n", + "mask = mask1 * mask2\n", "\n", - 
"ds1_lab = (mask*tarea*ds1.aice).sum(dim=['nj','ni'])*1.0e-12\n", - "ds2_lab = (mask*tarea*ds2.aice).sum(dim=['nj','ni'])*1.0e-12\n", + "ds1_lab = (mask * tarea * ds1.aice).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", + "ds2_lab = (mask * tarea * ds2.aice).sum(dim=[\"nj\", \"ni\"]) * 1.0e-12\n", "\n", "ds1_lab.plot()\n", "ds2_lab.plot()\n", "\n", - "plt.ylim((0,10))\n", + "plt.ylim((0, 10))\n", "plt.xlabel(\"Month\")\n", "plt.ylabel(\"Labrador Sea Ice Area $m x 10^{12}$\")\n", - "plt.legend([case1,case2])" + "plt.legend([case1, case2])" ] }, { @@ -467,14 +509,16 @@ "metadata": {}, "outputs": [], "source": [ - "uvel1 = ds1_ann['uvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "vvel1 = ds1_ann['vvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "uvel2 = ds2_ann['uvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "vvel2 = ds2_ann['vvel'].isel(time=slice(-nyears,None)).mean(\"time\").squeeze()\n", - "ds_angle = xr.open_dataset(\"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/angle_tx2_3v2.nc\")\n", - "angle = ds_angle['ANGLE']\n", - "\n", - "vect_diff(uvel1,vvel1,uvel2,vvel2,angle,\"N\",case1,case2,TLAT,TLON)" + "uvel1 = ds1_ann[\"uvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "vvel1 = ds1_ann[\"vvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "uvel2 = ds2_ann[\"uvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "vvel2 = ds2_ann[\"vvel\"].isel(time=slice(-nyears, None)).mean(\"time\").squeeze()\n", + "ds_angle = xr.open_dataset(\n", + " \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/angle_tx2_3v2.nc\"\n", + ")\n", + "angle = ds_angle[\"ANGLE\"]\n", + "\n", + "vect_diff(uvel1, vvel1, uvel2, vvel2, angle, \"N\", case1, case2, TLAT, TLON)" ] }, { @@ -484,7 +528,7 @@ "metadata": {}, "outputs": [], "source": [ - "vect_diff(uvel1,vvel1,uvel2,vvel2,angle,\"S\",case1,case2,TLAT,TLON)" + "vect_diff(uvel1, vvel1, uvel2, vvel2, angle, \"S\", case1, case2, TLAT, TLON)" ] }, { diff --git a/examples/nblibrary/lnd/land_comparison.ipynb b/examples/nblibrary/lnd/land_comparison.ipynb index af87618..e32dca5 100644 --- a/examples/nblibrary/lnd/land_comparison.ipynb +++ b/examples/nblibrary/lnd/land_comparison.ipynb @@ -48,8 +48,11 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "type = ['1850pAD','1850pSASU']\n", - "cases = ['ctsm51d159_f45_GSWP3_bgccrop_1850pAD', 'ctsm51d159_f45_GSWP3_bgccrop_1850pSASU']" + "type = [\"1850pAD\", \"1850pSASU\"]\n", + "cases = [\n", + " \"ctsm51d159_f45_GSWP3_bgccrop_1850pAD\",\n", + " \"ctsm51d159_f45_GSWP3_bgccrop_1850pSASU\",\n", + "]" ] }, { @@ -61,13 +64,11 @@ "source": [ "# -- read only these variables from the whole netcdf files\n", "# average over time\n", - "def preprocess (ds):\n", - " variables = ['TOTECOSYSC', 'TOTVEGC','TOTSOMC',\n", - " 'SOM_PAS_C_vr']\n", + "def preprocess(ds):\n", + " variables = [\"TOTECOSYSC\", \"TOTVEGC\", \"TOTSOMC\", \"SOM_PAS_C_vr\"]\n", "\n", - " ds_new= ds[variables]\n", - " return ds_new\n", - "\n" + " ds_new = ds[variables]\n", + " return ds_new" ] }, { @@ -78,27 +79,31 @@ "outputs": [], "source": [ "for c in range(len(cases)):\n", - "\n", - " sim_files =[]\n", + " sim_files = []\n", " sim_path = f\"{CESM_output_dir}/{cases[c]}/lnd/hist\"\n", " sim_files.extend(sorted(glob(join(f\"{sim_path}/{cases[c]}.clm2.h0.*.nc\"))))\n", - " # subset last 5 years of data \n", + " # subset last 5 years of 
data\n", " sim_files = sim_files[-60:None]\n", " print(f\"All simulation files for {cases[c]}: [{len(sim_files)} files]\")\n", "\n", - " temp = xr.open_mfdataset(sim_files, decode_times=True, combine='by_coords',\n", - " parallel=False, preprocess=preprocess).mean('time')\n", - " \n", + " temp = xr.open_mfdataset(\n", + " sim_files,\n", + " decode_times=True,\n", + " combine=\"by_coords\",\n", + " parallel=False,\n", + " preprocess=preprocess,\n", + " ).mean(\"time\")\n", + "\n", " if c == 0:\n", " ds = temp\n", " else:\n", - " ds = xr.concat([ds, temp],'case')\n", - " \n", + " ds = xr.concat([ds, temp], \"case\")\n", + "\n", "ds = ds.assign_coords({\"case\": type})\n", "\n", "# Calculate differences\n", - "diff = ds.isel(case=1) - ds.isel(case=0) \n", - "rel_diff = diff / ds.isel(case=1) " + "diff = ds.isel(case=1) - ds.isel(case=0)\n", + "rel_diff = diff / ds.isel(case=1)" ] }, { @@ -116,24 +121,24 @@ "metadata": {}, "outputs": [], "source": [ - "plt.figure(figsize=[14,14])\n", - "var = ['TOTECOSYSC' ,'TOTVEGC','TOTSOMC']\n", - "#var = ['GPP' ,'ELAI','ALT']\n", + "plt.figure(figsize=[14, 14])\n", + "var = [\"TOTECOSYSC\", \"TOTVEGC\", \"TOTSOMC\"]\n", + "# var = ['GPP' ,'ELAI','ALT']\n", "i = 1\n", "for v in range(len(var)):\n", " plt.subplot(3, 2, i)\n", - " ds[var[v]].isel(case=1).plot(robust=True) \n", - " plt.title(\"pSASU \"+ var[v])\n", + " ds[var[v]].isel(case=1).plot(robust=True)\n", + " plt.title(\"pSASU \" + var[v])\n", " plt.xlabel(None)\n", " plt.ylabel(None)\n", - " i = i+1\n", - " \n", + " i = i + 1\n", + "\n", " plt.subplot(3, 2, i)\n", - " diff[var[v]].plot(robust=True) \n", - " plt.title(\"pSASU - pAD \"+ var[v]) \n", + " diff[var[v]].plot(robust=True)\n", + " plt.title(\"pSASU - pAD \" + var[v])\n", " plt.xlabel(None)\n", - " plt.ylabel(None) \n", - " i = i+1\n" + " plt.ylabel(None)\n", + " i = i + 1" ] }, { @@ -152,40 +157,40 @@ "metadata": {}, "outputs": [], "source": [ - "plt.figure(figsize=[10,5])\n", - "var = 'SOM_PAS_C_vr'\n", + "plt.figure(figsize=[10, 5])\n", + "var = \"SOM_PAS_C_vr\"\n", "plt.subplot(1, 4, 1)\n", - "ds[var].sel(lon=300,lat=-10, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Tropical')\n", - "plt.ylabel('depth (m)')\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "ds[var].sel(lon=300, lat=-10, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Tropical\")\n", + "plt.ylabel(\"depth (m)\")\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "\n", "plt.subplot(1, 4, 2)\n", - "ds[var].sel(lon=25,lat=50, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Temperate')\n", + "ds[var].sel(lon=25, lat=50, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Temperate\")\n", "plt.ylabel(None)\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "plt.subplot(1, 4, 3)\n", - "ds[var].sel(lon=155,lat=66, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Arctic')\n", + "ds[var].sel(lon=155, lat=66, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Arctic\")\n", "plt.ylabel(None)\n", - "plt.xscale('log',base=10) \n", - "#plt.ylim(6,0)\n", + "plt.xscale(\"log\", base=10)\n", + "# plt.ylim(6,0)\n", "\n", "\n", "plt.subplot(1, 4, 4)\n", - 
"ds[var].sel(lon=155,lat=66, method='nearest').plot(hue='case',y='levsoi') ;\n", - "plt.gca().invert_yaxis() ;\n", - "plt.title('Arctic')\n", + "ds[var].sel(lon=155, lat=66, method=\"nearest\").plot(hue=\"case\", y=\"levsoi\")\n", + "plt.gca().invert_yaxis()\n", + "plt.title(\"Arctic\")\n", "plt.ylabel(None)\n", - "#plt.ylim(6,0)" + "# plt.ylim(6,0)" ] } ], diff --git a/examples/nblibrary/ocn/ocean_surface.ipynb b/examples/nblibrary/ocn/ocean_surface.ipynb index 4a96320..5ab9314 100644 --- a/examples/nblibrary/ocn/ocean_surface.ipynb +++ b/examples/nblibrary/ocn/ocean_surface.ipynb @@ -65,7 +65,7 @@ "outputs": [], "source": [ "CESM_output_dir = \"/glade/campaign/cesm/development/cross-wg/diagnostic_framework/CESM_output_for_testing\"\n", - "serial = False # use dask LocalCluster\n", + "serial = False # use dask LocalCluster\n", "Case = \"b.e23_alpha16b.BLT1850.ne30_t232.054\"\n", "savefigs = False\n", "mom6_tools_config = {}\n", @@ -78,8 +78,8 @@ "metadata": {}, "outputs": [], "source": [ - "OUTDIR = f'{CESM_output_dir}/{Case}/ocn/hist/'\n", - "print('Output directory is:', OUTDIR)" + "OUTDIR = f\"{CESM_output_dir}/{Case}/ocn/hist/\"\n", + "print(\"Output directory is:\", OUTDIR)" ] }, { @@ -95,12 +95,14 @@ "# The following parameters must be set accordingly\n", "######################################################\n", "\n", + "\n", "# create an empty class object\n", "class args:\n", - " pass\n", + " pass\n", + "\n", "\n", - "args.start_date = mom6_tools_config['start_date']\n", - "args.end_date = mom6_tools_config['end_date']\n", + "args.start_date = mom6_tools_config[\"start_date\"]\n", + "args.end_date = mom6_tools_config[\"end_date\"]\n", "args.casename = Case\n", "args.native = f\"{Case}.{mom6_tools_config['Fnames']['native']}\"\n", "args.static = f\"{Case}.{mom6_tools_config['Fnames']['static']}\"\n", @@ -114,15 +116,15 @@ "metadata": {}, "outputs": [], "source": [ - "if not os.path.isdir('PNG/BLD'):\n", - " print('Creating a directory to place figures (PNG/BLD)... \\n')\n", - " os.system('mkdir -p PNG/BLD')\n", - "if not os.path.isdir('PNG/MLD'):\n", - " print('Creating a directory to place figures (PNG/MLD)... \\n')\n", - " os.system('mkdir -p PNG/MLD')\n", - "if not os.path.isdir('ncfiles'):\n", - " print('Creating a directory to place netcdf files (ncfiles)... \\n')\n", - " os.system('mkdir ncfiles') " + "if not os.path.isdir(\"PNG/BLD\"):\n", + " print(\"Creating a directory to place figures (PNG/BLD)... \\n\")\n", + " os.system(\"mkdir -p PNG/BLD\")\n", + "if not os.path.isdir(\"PNG/MLD\"):\n", + " print(\"Creating a directory to place figures (PNG/MLD)... \\n\")\n", + " os.system(\"mkdir -p PNG/MLD\")\n", + "if not os.path.isdir(\"ncfiles\"):\n", + " print(\"Creating a directory to place netcdf files (ncfiles)... 
\\n\")\n", + " os.system(\"mkdir ncfiles\")" ] }, { @@ -132,10 +134,10 @@ "outputs": [], "source": [ "# Spin up cluster (if running in parallel)\n", - "client=None\n", + "client = None\n", "if not serial:\n", - " cluster = LocalCluster(**lc_kwargs)\n", - " client = Client(cluster)\n", + " cluster = LocalCluster(**lc_kwargs)\n", + " client = Client(cluster)\n", "\n", "client" ] @@ -147,8 +149,8 @@ "outputs": [], "source": [ "# load mom6 grid\n", - "grd = MOM6grid(OUTDIR+args.static)\n", - "grd_xr = MOM6grid(OUTDIR+args.static, xrformat=True)" + "grd = MOM6grid(OUTDIR + args.static)\n", + "grd_xr = MOM6grid(OUTDIR + args.static, xrformat=True)" ] }, { @@ -157,21 +159,23 @@ "metadata": {}, "outputs": [], "source": [ - "print('Reading native dataset...')\n", + "print(\"Reading native dataset...\")\n", "startTime = datetime.now()\n", "\n", + "\n", "def preprocess(ds):\n", - " ''' Compute montly averages and return the dataset with variables'''\n", - " variables = ['oml','mlotst','tos','SSH', 'SSU', 'SSV', 'speed', 'time_bnds']\n", - " for v in variables:\n", - " if v not in ds.variables:\n", - " ds[v] = xr.zeros_like(ds.SSH)\n", - " return ds[variables]\n", + " \"\"\"Compute montly averages and return the dataset with variables\"\"\"\n", + " variables = [\"oml\", \"mlotst\", \"tos\", \"SSH\", \"SSU\", \"SSV\", \"speed\", \"time_bnds\"]\n", + " for v in variables:\n", + " if v not in ds.variables:\n", + " ds[v] = xr.zeros_like(ds.SSH)\n", + " return ds[variables]\n", + "\n", "\n", - "ds1 = xr.open_mfdataset(OUTDIR+args.native, parallel=False)\n", + "ds1 = xr.open_mfdataset(OUTDIR + args.native, parallel=False)\n", "ds = preprocess(ds1)\n", "\n", - "print('Time elasped: ', datetime.now() - startTime)" + "print(\"Time elasped: \", datetime.now() - startTime)" ] }, { @@ -180,7 +184,7 @@ "metadata": {}, "outputs": [], "source": [ - "print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))\n", + "print(\"Selecting data between {} and {}...\".format(args.start_date, args.end_date))\n", "ds_sel = ds.sel(time=slice(args.start_date, args.end_date))" ] }, @@ -190,10 +194,10 @@ "metadata": {}, "outputs": [], "source": [ - "catalog = intake.open_catalog(mom6_tools_config['oce_cat'])\n", + "catalog = intake.open_catalog(mom6_tools_config[\"oce_cat\"])\n", "mld_obs = catalog[args.mld_obs].to_dask()\n", "# uncomment to list all datasets available\n", - "#list(catalog)" + "# list(catalog)" ] }, { @@ -211,7 +215,7 @@ "source": [ "%matplotlib inline\n", "# MLD\n", - "get_MLD(ds,'mlotst', mld_obs, grd, args)" + "get_MLD(ds, \"mlotst\", mld_obs, grd, args)" ] }, { @@ -227,7 +231,7 @@ "metadata": {}, "outputs": [], "source": [ - "get_BLD(ds, 'oml', grd, args)" + "get_BLD(ds, \"oml\", grd, args)" ] }, { @@ -237,7 +241,7 @@ "outputs": [], "source": [ "# SSH (not working)\n", - "#get_SSH(ds, 'SSH', grd, args)" + "# get_SSH(ds, 'SSH', grd, args)" ] } ], From 7c184212ac0eb1619fe016ca6d113fe5612c5fcc Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:31:29 -0600 Subject: [PATCH 13/23] ran black --- cupid/build.py | 1 + cupid/read.py | 3 ++- cupid/timeseries.py | 7 +++++-- cupid/util.py | 18 ++++++++++++++++-- examples/nblibrary/ice/plot_diff.py | 14 ++++++++++++-- examples/nblibrary/ice/vect_diff.py | 1 + 6 files changed, 37 insertions(+), 7 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index 5a09bdd..a550ad5 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -20,6 +20,7 @@ import click import yaml + @click.command() @click.argument("config_path", default="config.yml") 
def build(config_path): diff --git a/cupid/read.py b/cupid/read.py index 775226a..2a59078 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -35,7 +35,8 @@ def preprocess(ds): # not sure what the chunking kwarg is doing here either dsets = cat_subset.to_dataset_dict( - xarray_open_kwargs={"chunks": {"time": -1}}, preprocess=preprocess, + xarray_open_kwargs={"chunks": {"time": -1}}, + preprocess=preprocess, ) else: diff --git a/cupid/timeseries.py b/cupid/timeseries.py index 8f8443d..f60de0f 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -136,7 +136,9 @@ def create_time_series( # Open an xarray dataset from the first model history file: hist_file_ds = xr.open_dataset( - hist_files[0], decode_cf=False, decode_times=False, + hist_files[0], + decode_cf=False, + decode_times=False, ) # Get a list of data variables in the 1st hist file: @@ -326,7 +328,8 @@ def create_time_series( if vars_to_derive: if component == "atm": derive_cam_variables( - vars_to_derive=vars_to_derive, ts_dir=ts_dir[case_idx], + vars_to_derive=vars_to_derive, + ts_dir=ts_dir[case_idx], ) if serial: diff --git a/cupid/util.py b/cupid/util.py index e740506..1b617d2 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -26,6 +26,7 @@ from jinja2 import Template from papermill.engines import NBClientEngine + class MarkdownJinjaEngine(NBClientEngine): """Class for using the Jinja Engine to run notebooks""" @@ -132,7 +133,14 @@ def setup_book(config_path): def create_ploomber_nb_task( - nb, info, cat_path, nb_path_root, output_dir, global_params, dag, dependency=None, + nb, + info, + cat_path, + nb_path_root, + output_dir, + global_params, + dag, + dependency=None, ): """ Creates a ploomber task for running a notebook, including necessary parameters. @@ -206,7 +214,13 @@ def create_ploomber_nb_task( def create_ploomber_script_task( - script, info, cat_path, nb_path_root, global_params, dag, dependency=None, + script, + info, + cat_path, + nb_path_root, + global_params, + dag, + dependency=None, ): """ Creates a Ploomber task for running a script, including necessary parameters. 
diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index 44e0128..9882a7f 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -38,7 +38,12 @@ def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): field_std = field_diff.std() this = ax.pcolormesh( - TLON, TLAT, field1, norm=norm, cmap="tab20", transform=ccrs.PlateCarree(), + TLON, + TLAT, + field1, + norm=norm, + cmap="tab20", + transform=ccrs.PlateCarree(), ) plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) plt.title(case1, fontsize=10) @@ -56,7 +61,12 @@ def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): ax.add_feature(cfeature.LAND, zorder=100, edgecolor="k") this = ax.pcolormesh( - TLON, TLAT, field2, norm=norm, cmap="tab20", transform=ccrs.PlateCarree(), + TLON, + TLAT, + field2, + norm=norm, + cmap="tab20", + transform=ccrs.PlateCarree(), ) plt.colorbar(this, orientation="vertical", fraction=0.04, pad=0.01) plt.title(case2, fontsize=10) diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index d6c7331..5272fe8 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -6,6 +6,7 @@ import numpy as np from matplotlib.gridspec import GridSpec + def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON): uvel_rot1 = uvel1 * np.cos(angle) - vvel1 * np.sin(angle) vvel_rot1 = uvel1 * np.sin(angle) + vvel1 * np.cos(angle) From 9cb9c3dfd87f587614bd90766150319ed07a74c3 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:38:37 -0600 Subject: [PATCH 14/23] fix imports --- cupid/build.py | 2 ++ cupid/clear.py | 1 + cupid/read.py | 1 + cupid/run.py | 1 + cupid/timeseries.py | 1 + cupid/util.py | 1 + docs/conf.py | 1 + examples/nblibrary/ice/plot_diff.py | 6 ++++-- examples/nblibrary/ice/vect_diff.py | 8 +++++--- 9 files changed, 17 insertions(+), 5 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index a550ad5..7962170 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -14,9 +14,11 @@ Returns: None """ +from __future__ import annotations import subprocess import sys + import click import yaml diff --git a/cupid/clear.py b/cupid/clear.py index fe02ece..d72d1b7 100755 --- a/cupid/clear.py +++ b/cupid/clear.py @@ -8,6 +8,7 @@ at that location. """ +from __future__ import annotations import os import shutil diff --git a/cupid/read.py b/cupid/read.py index 2a59078..16af2bf 100644 --- a/cupid/read.py +++ b/cupid/read.py @@ -6,6 +6,7 @@ - get_collection(path_to_catalog, **kwargs): Get a collection of datasets from an intake catalog based on specified criteria. """ +from __future__ import annotations import intake import yaml diff --git a/cupid/run.py b/cupid/run.py index 3dd3d9c..ae9c3d9 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -20,6 +20,7 @@ -config_path Path to the YAML configuration file containing specifications for notebooks (default: config.yml) -h, --help Show this message and exit. 
""" +from __future__ import annotations import os import warnings diff --git a/cupid/timeseries.py b/cupid/timeseries.py index f60de0f..c71ae69 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -5,6 +5,7 @@ # ++++++++++++++++++++++++++++++ # Import standard python modules # ++++++++++++++++++++++++++++++ +from __future__ import annotations import glob import multiprocessing as mp diff --git a/cupid/util.py b/cupid/util.py index 1b617d2..0f774a7 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -13,6 +13,7 @@ - ManageCondaKernel: Class for managing conda kernels. - MarkdownJinjaEngine: Class for using the Jinja Engine to run notebooks. """ +from __future__ import annotations import os import sys diff --git a/docs/conf.py b/docs/conf.py index 4df86b1..eba9c81 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,6 +9,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. +from __future__ import annotations import datetime import os diff --git a/examples/nblibrary/ice/plot_diff.py b/examples/nblibrary/ice/plot_diff.py index 9882a7f..d6395de 100644 --- a/examples/nblibrary/ice/plot_diff.py +++ b/examples/nblibrary/ice/plot_diff.py @@ -1,10 +1,12 @@ +from __future__ import annotations + import cartopy.crs as ccrs import cartopy.feature as cfeature import matplotlib as mpl -import matplotlib.pyplot as plt import matplotlib.path as mpath -from matplotlib.gridspec import GridSpec +import matplotlib.pyplot as plt import numpy as np +from matplotlib.gridspec import GridSpec def plot_diff(field1, field2, levels, case1, case2, title, proj, TLAT, TLON): diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 5272fe8..1fdfe14 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import cartopy.crs as ccrs import cartopy.feature as cfeature import matplotlib as mpl @@ -55,7 +57,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) plt.title(case1, fontsize=10) intv = 5 - ## add vectors + # add vectors Q = ax.quiver( TLON[::intv, ::intv].values, TLAT[::intv, ::intv].values, @@ -103,7 +105,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) plt.title(case1, fontsize=10) intv = 5 - ## add vectors + # add vectors Q = ax.quiver( TLON[::intv, ::intv].values, TLAT[::intv, ::intv].values, @@ -151,7 +153,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) plt.title(case2 + "-" + case1, fontsize=10) intv = 5 - ## add vectors + # add vectors Q = ax.quiver( TLON[::intv, ::intv].values, TLAT[::intv, ::intv].values, From 2381668e05f8c186aca375739d3f7a61126b3694 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:41:23 -0600 Subject: [PATCH 15/23] fix imports --- cupid/quickstart.py | 7 ++++--- cupid/timeseries.py | 1 - docs/conf.py | 2 -- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/cupid/quickstart.py b/cupid/quickstart.py index 8c17864..045d6f3 100644 --- a/cupid/quickstart.py +++ b/cupid/quickstart.py @@ -1,3 +1,4 @@ -### To be created: a script, maybe called through a command line entry point, -### that sets up a directory with a config.yml file and -### basics necessary to set up a notebook collection +# To be created: a script, maybe called through a command line 
entry point, +# that sets up a directory with a config.yml file and +# basics necessary to set up a notebook collection +from __future__ import annotations diff --git a/cupid/timeseries.py b/cupid/timeseries.py index c71ae69..c1748ef 100644 --- a/cupid/timeseries.py +++ b/cupid/timeseries.py @@ -1,7 +1,6 @@ """ Timeseries generation tool adapted from ADF for general CUPiD use. """ - # ++++++++++++++++++++++++++++++ # Import standard python modules # ++++++++++++++++++++++++++++++ diff --git a/docs/conf.py b/docs/conf.py index eba9c81..d72d5ca 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,9 +3,7 @@ # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html - # -- Path setup -------------------------------------------------------------- - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. From ad2cf1788e20448bacca29219e3167a0c950f492 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:47:46 -0600 Subject: [PATCH 16/23] pyupgrade fixes --- cupid/build.py | 8 +++++--- cupid/util.py | 13 ++++++++----- docs/conf.py | 2 +- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index 7962170..69d35a1 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -36,15 +36,17 @@ def build(config_path): None """ - with open(config_path, "r") as fid: + with open(config_path) as fid: control = yaml.safe_load(fid) sname = control["data_sources"]["sname"] run_dir = control["data_sources"]["run_dir"] - subprocess.run(["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) subprocess.run( - ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"], + ["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) + subprocess.run( + ["jupyter-book", "build", + f"{run_dir}/computed_notebooks/{sname}", "--all"], ) # Originally used this code to copy jupyter book HTML to a location to host it online diff --git a/cupid/util.py b/cupid/util.py index 0f774a7..68aea12 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -47,13 +47,14 @@ def execute_managed_notebook(cls, nb_man, kernel_name, **kwargs): def get_control_dict(config_path): """Get control dictionary from configuration file""" try: - with open(config_path, "r") as fid: + with open(config_path) as fid: control = yaml.safe_load(fid) except FileNotFoundError: print(f"ERROR: {config_path} not found") sys.exit(1) - default_kernel_name = control["computation_config"].pop("default_kernel_name", None) + default_kernel_name = control["computation_config"].pop( + "default_kernel_name", None) control["env_check"] = dict() @@ -61,7 +62,8 @@ def get_control_dict(config_path): for nb_category in control["compute_notebooks"].values(): # pylint: disable=invalid-name for nb, info in nb_category.items(): - info["kernel_name"] = info.get("kernel_name", default_kernel_name) + info["kernel_name"] = info.get( + `"kernel_name", default_kernel_name) if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( @@ -76,7 +78,8 @@ def get_control_dict(config_path): if "compute_scripts" in control: for script_category in control["compute_scripts"].values(): for script, info in script_category.items(): - info["kernel_name"] = info.get("kernel_name", default_kernel_name) +
info["kernel_name"] = info.get( + "kernel_name", default_kernel_name) if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( @@ -120,7 +123,7 @@ def setup_book(config_path): path_to_here = os.path.dirname(os.path.realpath(__file__)) - with open(f"{path_to_here}/_jupyter-book-config-defaults.yml", "r") as fid: + with open(f"{path_to_here}/_jupyter-book-config-defaults.yml") as fid: config = yaml.safe_load(fid) # update defaults diff --git a/docs/conf.py b/docs/conf.py index d72d5ca..49a0894 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,7 +26,7 @@ os.system(f"cp ../{file} ./") # Remove any images from the first line of the file - with open(file, "r") as f: + with open(file) as f: file1 = f.readline() file1 = re.sub(" ", "", file1) file_rest = f.read() From a53fc60de57ff8778101ee758a10460a68d71982 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:50:22 -0600 Subject: [PATCH 17/23] fix black-jupyter and typo --- cupid/build.py | 6 ++---- cupid/util.py | 9 +++------ 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/cupid/build.py b/cupid/build.py index 69d35a1..847db31 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -42,11 +42,9 @@ def build(config_path): sname = control["data_sources"]["sname"] run_dir = control["data_sources"]["run_dir"] + subprocess.run(["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) subprocess.run( - ["jupyter-book", "clean", f"{run_dir}/computed_notebooks/{sname}"]) - subprocess.run( - ["jupyter-book", "build", - f"{run_dir}/computed_notebooks/{sname}", "--all"], + ["jupyter-book", "build", f"{run_dir}/computed_notebooks/{sname}", "--all"], ) # Originally used this code to copy jupyter book HTML to a location to host it online diff --git a/cupid/util.py b/cupid/util.py index 68aea12..1215d1b 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -53,8 +53,7 @@ def get_control_dict(config_path): print(f"ERROR: {config_path} not found") sys.exit(1) - default_kernel_name = control["computation_config"].pop( - "default_kernel_name", None) + default_kernel_name = control["computation_config"].pop("default_kernel_name", None) control["env_check"] = dict() @@ -62,8 +61,7 @@ def get_control_dict(config_path): for nb_category in control["compute_notebooks"].values(): # pylint: disable=invalid-name for nb, info in nb_category.items(): - info["kernel_name"] = info.get( - `"kernel_name", default_kernel_name) + info["kernel_name"] = info.get("kernel_name", default_kernel_name) if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( @@ -78,8 +76,7 @@ def get_control_dict(config_path): if "compute_scripts" in control: for script_category in control["compute_scripts"].values(): for script, info in script_category.items(): - info["kernel_name"] = info.get( - "kernel_name", default_kernel_name) + info["kernel_name"] = info.get("kernel_name", default_kernel_name) if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( From 7418983cf2ba4459e06e46842c9b2090b9aa3ca0 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 08:56:36 -0600 Subject: [PATCH 18/23] adjust line length --- .pre-commit-config.yaml | 1 + cupid/build.py | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 870b1d9..d05026d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,3 +40,4 @@ repos: rev: 7.0.0 hooks: - id: flake8 + args: [--max-line-length=120] diff --git a/cupid/build.py 
b/cupid/build.py index 847db31..1e80e26 100755 --- a/cupid/build.py +++ b/cupid/build.py @@ -5,8 +5,8 @@ The main function `build()` reads the configuration file (default config.yml), extracts the necessary information such as the name of the book and the -directory containing computed notebooks, and then proceeds to clean and build the -Jupyter book using the `jupyter-book` command-line tool. +directory containing computed notebooks, and then proceeds to clean and build +the Jupyter book using the `jupyter-book` command-line tool. Args: CONFIG_PATH: str, path to configuration file (default config.yml) @@ -17,7 +17,6 @@ from __future__ import annotations import subprocess -import sys import click import yaml From cc5339f8cd0526648ab52b3a93c01430037e1d10 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 09:03:02 -0600 Subject: [PATCH 19/23] flake8 --- docs/conf.py | 2 ++ examples/nblibrary/ice/vect_diff.py | 3 +-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 49a0894..523d25f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -105,6 +105,7 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. +# fmt: off html_theme_options = dict( # analytics_id="" this is configured in rtfd.io # canonical_url="", @@ -117,6 +118,7 @@ home_page_in_toc=True, extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", ) +# fmt: on # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 1fdfe14..012dc5d 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -2,7 +2,6 @@ import cartopy.crs as ccrs import cartopy.feature as cfeature -import matplotlib as mpl import matplotlib.path as mpath import matplotlib.pyplot as plt import numpy as np @@ -164,7 +163,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) transform=ccrs.PlateCarree(), ) units = "cm/s" - qk = ax.quiverkey( + ax.quiverkey( Q, 0.85, 0.025, From e3a78e1a3ffe00d8b750beaea50ff23c518c4d42 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 09:07:15 -0600 Subject: [PATCH 20/23] flake8 --- cupid/run.py | 4 +++- docs/conf.py | 4 +--- examples/nblibrary/ice/vect_diff.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cupid/run.py b/cupid/run.py index ae9c3d9..8c072ba 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -36,6 +36,8 @@ # fmt: off # pylint: disable=line-too-long + + @click.command(context_settings=CONTEXT_SETTINGS) @click.option("--serial", "-s", is_flag=True, help="Do not use LocalCluster objects") @click.option("--time-series", "-ts", is_flag=True, help="Run time series generation scripts prior to diagnostics") @@ -204,7 +206,7 @@ def run( warnings.warn( f"Environment {bad_env} specified for {nb}.ipynb could not be found;"+ f" {nb}.ipynb will not be run."+ - f"See README.md for environment installation instructions.", + "See README.md for environment installation instructions.", ) all_nbs.pop(nb) diff --git a/docs/conf.py b/docs/conf.py index 523d25f..7aeeec4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -105,7 +105,6 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# fmt: off html_theme_options = dict( # analytics_id="" this is configured in rtfd.io # canonical_url="", @@ -117,8 +116,7 @@ use_issues_button=True, home_page_in_toc=True, extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", -) -# fmt: on +) # noqa # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 012dc5d..1c0422e 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -115,7 +115,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) transform=ccrs.PlateCarree(), ) units = "cm/s" - qk = ax.quiverkey( + ax.quiverkey( Q, 0.85, 0.025, From 825843023db1d970abbd5d437fd5347973a2989b Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 09:10:41 -0600 Subject: [PATCH 21/23] flake8 take 3 --- docs/conf.py | 4 ++-- examples/nblibrary/ice/vect_diff.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 7aeeec4..bd59efa 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -115,8 +115,8 @@ use_repository_button=True, use_issues_button=True, home_page_in_toc=True, - extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. 
Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", -) # noqa + extra_footer="The National Center for Atmospheric Research is sponsored by the National Science Foundation. Any opinions, findings and conclusions or recommendations expressed in this material do not necessarily reflect the views of the National Science Foundation.", # noqa +) # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, diff --git a/examples/nblibrary/ice/vect_diff.py b/examples/nblibrary/ice/vect_diff.py index 1c0422e..80ca09e 100644 --- a/examples/nblibrary/ice/vect_diff.py +++ b/examples/nblibrary/ice/vect_diff.py @@ -67,7 +67,7 @@ def vect_diff(uvel1, vvel1, uvel2, vvel2, angle, proj, case1, case2, TLAT, TLON) transform=ccrs.PlateCarree(), ) units = "cm/s" - qk = ax.quiverkey( + ax.quiverkey( Q, 0.85, 0.025, From ce6ead11e23edf5d4bdc286354dc6d21c9922e57 Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 09:22:48 -0600 Subject: [PATCH 22/23] remove autopep8 due to conflicts with black-jupyter --- .pre-commit-config.yaml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d05026d..672078c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,11 +31,6 @@ repos: - id: pyupgrade args: [--py38-plus] - - repo: https://github.com/hhatto/autopep8 - rev: v2.1.0 - hooks: - - id: autopep8 - - repo: https://github.com/PyCQA/flake8 rev: 7.0.0 hooks: From e8eb815266ec120ca6014064879aa5d31561091b Mon Sep 17 00:00:00 2001 From: Teagan Date: Thu, 16 May 2024 09:31:59 -0600 Subject: [PATCH 23/23] fix remaining pre-commits --- cupid/run.py | 6 +++--- cupid/util.py | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/cupid/run.py b/cupid/run.py index 8c072ba..d934cc7 100755 --- a/cupid/run.py +++ b/cupid/run.py @@ -204,8 +204,8 @@ def run( if not control["env_check"][info["kernel_name"]]: bad_env = info["kernel_name"] warnings.warn( - f"Environment {bad_env} specified for {nb}.ipynb could not be found;"+ - f" {nb}.ipynb will not be run."+ + f"Environment {bad_env} specified for {nb}.ipynb could not be found;" + + f" {nb}.ipynb will not be run." 
+ "See README.md for environment installation instructions.", ) all_nbs.pop(nb) @@ -247,7 +247,7 @@ def run( if not control["env_check"][info["kernel_name"]]: bad_env = info["kernel_name"] warnings.warn( - f"Environment {bad_env} specified for {script}.py could not be found;"+ + f"Environment {bad_env} specified for {script}.py could not be found;" + f"{script}.py will not be run.", ) all_scripts.pop(script) diff --git a/cupid/util.py b/cupid/util.py index 1215d1b..2d5b9cc 100644 --- a/cupid/util.py +++ b/cupid/util.py @@ -65,7 +65,8 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No conda environment specified for {nb}.ipynb and no default kernel set, will use cupid-analysis environment.", + f"No conda environment specified for {nb}.ipynb and" + + " no default kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = ( @@ -80,7 +81,8 @@ def get_control_dict(config_path): if info["kernel_name"] is None: info["kernel_name"] = "cupid-analysis" warnings.warn( - f"No environment specified for {script}.py and no default kernel set, will use cupid-analysis environment.", + f"No environment specified for {script}.py and no default" + + " kernel set, will use cupid-analysis environment.", ) if info["kernel_name"] not in control["env_check"]: control["env_check"][info["kernel_name"]] = (
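The string hunks in this last patch share one pattern: long warning messages are split across physical lines joined with an explicit "+" so each line stays under the flake8 --max-line-length=120 limit added earlier in the series, and only the fragments that interpolate a value remain f-strings. A minimal runnable sketch of the pattern follows; the notebook name is hypothetical, not taken from a real config.

    from __future__ import annotations

    import warnings

    nb = "index"  # hypothetical notebook name

    warnings.warn(
        f"No conda environment specified for {nb}.ipynb and"
        + " no default kernel set, will use cupid-analysis environment.",
    )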