From ef11a6b21755deb75972f044b34c56d55e44663b Mon Sep 17 00:00:00 2001 From: CoLM-SYSU <89968722+CoLM-SYSU@users.noreply.github.com> Date: Tue, 11 Jun 2024 23:06:18 +0800 Subject: [PATCH] Revert "Add meteorological forcing downscaling module" --- Makefile | 2 - include/Makeoptions | 53 +- include/Makeoptions.CMA-HPC | 0 ...keoptions.SYSU-BaiduBoat_Parallel_with_zip | 0 include/Makeoptions.gnu | 7 +- include/Makeoptions_origin | 55 - include/define.h | 8 +- main/MOD_Const_PFT.F90 | 2 +- main/MOD_CropReadin.F90 | 4 +- main/MOD_Forcing.F90 | 483 ++--- main/MOD_ForcingDownscaling.F90 | 809 +++++--- main/MOD_HistGridded.F90 | 494 ++--- main/MOD_HistVector.F90 | 214 +- main/MOD_HistWriteBack.F90 | 13 +- main/MOD_OrbCosazi.F90 | 68 - main/MOD_OrbCoszen.F90 | 3 - main/MOD_Vars_1DForcing.F90 | 3 - main/MOD_Vars_Global.F90 | 5 - main/MOD_Vars_TimeInvariants.F90 | 87 +- mkinidata/MOD_PercentagesPFTReadin.F90 | 21 +- mksrfdata/Aggregation_PercentagesPFT.F90 | 21 +- mksrfdata/Aggregation_TopographyFactors.F90 | 417 ---- mksrfdata/MKSRFDATA.F90 | 159 +- mksrfdata/MOD_AggregationRequestData.F90 | 296 +-- mksrfdata/MOD_LandCrop.F90 | 23 +- mksrfdata/MOD_LandPFT.F90 | 16 + mksrfdata/MOD_PixelsetShared.F90 | 2 + share/MOD_Grid.F90 | 418 ++-- share/MOD_InterpArealWeight.F90 | 1839 ----------------- share/MOD_InterpBilinear.F90 | 1037 ---------- share/MOD_Mapping_Grid2Pset.F90 | 905 -------- share/MOD_Mapping_Pset2Grid.F90 | 1079 ---------- share/MOD_Namelist.F90 | 9 +- share/MOD_NetCDFVector.F90 | 1596 -------------- share/MOD_SpatialMapping.F90 | 6 +- 35 files changed, 1501 insertions(+), 8653 deletions(-) mode change 100755 => 100644 Makefile mode change 100755 => 120000 include/Makeoptions mode change 100755 => 100644 include/Makeoptions.CMA-HPC mode change 100755 => 100644 include/Makeoptions.SYSU-BaiduBoat_Parallel_with_zip mode change 100755 => 100644 include/Makeoptions.gnu delete mode 100644 include/Makeoptions_origin mode change 100755 => 100644 main/MOD_Const_PFT.F90 mode change 100755 => 100644 main/MOD_CropReadin.F90 mode change 100755 => 100644 main/MOD_HistGridded.F90 mode change 100755 => 100644 main/MOD_HistVector.F90 mode change 100755 => 100644 main/MOD_HistWriteBack.F90 delete mode 100644 main/MOD_OrbCosazi.F90 mode change 100755 => 100644 mkinidata/MOD_PercentagesPFTReadin.F90 mode change 100755 => 100644 mksrfdata/Aggregation_PercentagesPFT.F90 delete mode 100644 mksrfdata/Aggregation_TopographyFactors.F90 mode change 100755 => 100644 mksrfdata/MOD_LandCrop.F90 mode change 100755 => 100644 mksrfdata/MOD_LandPFT.F90 mode change 100755 => 100644 mksrfdata/MOD_PixelsetShared.F90 delete mode 100644 share/MOD_InterpArealWeight.F90 delete mode 100644 share/MOD_InterpBilinear.F90 delete mode 100644 share/MOD_Mapping_Grid2Pset.F90 delete mode 100644 share/MOD_Mapping_Pset2Grid.F90 delete mode 100755 share/MOD_NetCDFVector.F90 diff --git a/Makefile b/Makefile old mode 100755 new mode 100644 index 5aa3e86d..d2cbaeff --- a/Makefile +++ b/Makefile @@ -77,7 +77,6 @@ OBJS_MKSRFDATA = \ Aggregation_SoilParameters.o \ Aggregation_DBedrock.o \ Aggregation_Topography.o \ - Aggregation_TopographyFactors.o \ Aggregation_Urban.o \ MOD_MeshFilter.o \ MOD_RegionClip.o \ @@ -128,7 +127,6 @@ OBJS_BASIC = \ MOD_NdepData.o \ MOD_FireData.o \ MOD_OrbCoszen.o \ - MOD_OrbCosazi.o \ MOD_3DCanopyRadiation.o \ MOD_Aerosol.o \ MOD_SnowSnicar.o \ diff --git a/include/Makeoptions b/include/Makeoptions deleted file mode 100755 index 6e99b60b..00000000 --- a/include/Makeoptions +++ /dev/null @@ -1,52 +0,0 @@ -# 
======================================================= -# mpif90 - gfortran -# - - FF = mpif90 -fopenmp - - NETCDF_LIB = /usr/lib/x86_64-linux-gnu - NETCDF_INC = /usr/include - - MOD_CMD = -J - -# determine the gfortran version - GCC_VERSION := "`gcc -dumpversion`" - IS_GCC_ABOVE_10 := $(shell expr "$(GCC_VERSION)" ">=" "10") - ifeq "$(IS_GCC_ABOVE_10)" "1" - FOPTS = -fdefault-real-8 -ffree-form -C -g -u -xcheck=stkovf \ - -ffpe-trap=invalid,zero,overflow -fbounds-check \ - -mcmodel=medium -fbacktrace -fdump-core -cpp \ - -ffree-line-length-0 -fallow-argument-mismatch - else - FOPTS = -fdefault-real-8 -ffree-form -C -g -u -xcheck=stkovf \ - -ffpe-trap=invalid,zero,overflow -fbounds-check \ - -mcmodel=medium -fbacktrace -fdump-core -cpp \ - -ffree-line-length-0 - endif - - INCLUDE_DIR = -I../include -I../share -I../mksrfdata -I../mkinidata -I../main -I$(NETCDF_INC) - LDFLAGS = -L$(NETCDF_LIB) -lnetcdff -lnetcdf -llapack -lblas - - - -#============================================================ -# CaMa-Flood Mkinclude (for Linux, gfortran) - -RM = /bin/rm -f -CP = /bin/cp -#---- -# Pre-Prosessing options -# DMPI=-DUseMPI: activate when MPI parallelization is used -# DCDF=-DUseCDF: activate when using netCDF, comment out when not needed -# DATM=-DNoAtom: activate when OMP ATOMIC calculation should be avoided (bit identical simulation) -#---- -#DMPI=-DUseMPI -DCDF=-DUseCDF -#DATM=-DNoAtom -CFLAGS=$(DMPI) $(DCDF) $(DATM) -#---- -FCMP = /usr/bin/gfortran -fopenmp -FC = /usr/bin/gfortran - -LFLAGS = -FFLAGS = -O3 -Wall -cpp -free -fimplicit-none -fbounds-check -fbacktrace diff --git a/include/Makeoptions b/include/Makeoptions new file mode 120000 index 00000000..02b334d2 --- /dev/null +++ b/include/Makeoptions @@ -0,0 +1 @@ +Makeoptions.gnu \ No newline at end of file diff --git a/include/Makeoptions.CMA-HPC b/include/Makeoptions.CMA-HPC old mode 100755 new mode 100644 diff --git a/include/Makeoptions.SYSU-BaiduBoat_Parallel_with_zip b/include/Makeoptions.SYSU-BaiduBoat_Parallel_with_zip old mode 100755 new mode 100644 diff --git a/include/Makeoptions.gnu b/include/Makeoptions.gnu old mode 100755 new mode 100644 index 6e99b60b..ea970844 --- a/include/Makeoptions.gnu +++ b/include/Makeoptions.gnu @@ -4,8 +4,11 @@ FF = mpif90 -fopenmp - NETCDF_LIB = /usr/lib/x86_64-linux-gnu - NETCDF_INC = /usr/include +NETCDF_LIB = /usr/lib/x86_64-linux-gnu +NETCDF_INC = /usr/include + +# NETCDF_LIB = /opt/netcdf-c-4.9.2-fortran-4.6.0-gnu/lib +# NETCDF_INC = /opt/netcdf-c-4.9.2-fortran-4.6.0-gnu/include MOD_CMD = -J diff --git a/include/Makeoptions_origin b/include/Makeoptions_origin deleted file mode 100644 index d9593b75..00000000 --- a/include/Makeoptions_origin +++ /dev/null @@ -1,55 +0,0 @@ -# ======================================================= -# mpif90 - gfortran -# - - FF = mpif90 -fopenmp - -# NETCDF_LIB = /usr/lib/x86_64-linux-gnu -# NETCDF_INC = /usr/include - - NETCDF_LIB = /opt/netcdf-c-4.9.2-fortran-4.6.0-gnu/lib - NETCDF_INC = /opt/netcdf-c-4.9.2-fortran-4.6.0-gnu/include - - MOD_CMD = -J - -# determine the gfortran version - GCC_VERSION := "`gcc -dumpversion`" - IS_GCC_ABOVE_10 := $(shell expr "$(GCC_VERSION)" ">=" "10") - ifeq "$(IS_GCC_ABOVE_10)" "1" - FOPTS = -fdefault-real-8 -ffree-form -C -g -u -xcheck=stkovf \ - -ffpe-trap=invalid,zero,overflow -fbounds-check \ - -mcmodel=medium -fbacktrace -fdump-core -cpp \ - -ffree-line-length-0 -fallow-argument-mismatch - else - FOPTS = -fdefault-real-8 -ffree-form -C -g -u -xcheck=stkovf \ - -ffpe-trap=invalid,zero,overflow -fbounds-check \ - 
-mcmodel=medium -fbacktrace -fdump-core -cpp \ - -ffree-line-length-0 - endif - - INCLUDE_DIR = -I../include -I../share -I../mksrfdata -I../mkinidata -I../main -I$(NETCDF_INC) - LDFLAGS = -L$(NETCDF_LIB) -lnetcdff -lnetcdf -llapack -lblas - - - -#============================================================ -# CaMa-Flood Mkinclude (for Linux, gfortran) - -RM = /bin/rm -f -CP = /bin/cp -#---- -# Pre-Prosessing options -# DMPI=-DUseMPI: activate when MPI parallelization is used -# DCDF=-DUseCDF: activate when using netCDF, comment out when not needed -# DATM=-DNoAtom: activate when OMP ATOMIC calculation should be avoided (bit identical simulation) -#---- -#DMPI=-DUseMPI -DCDF=-DUseCDF -#DATM=-DNoAtom -CFLAGS=$(DMPI) $(DCDF) $(DATM) -#---- -FCMP = /usr/bin/gfortran -fopenmp -FC = /usr/bin/gfortran - -LFLAGS = -FFLAGS = -O3 -Wall -cpp -free -fimplicit-none -fbounds-check -fbacktrace diff --git a/include/define.h b/include/define.h index e8d91e4a..67a08740 100755 --- a/include/define.h +++ b/include/define.h @@ -20,7 +20,7 @@ ! 3.1 If defined, range of variables is checked. #define RangeCheck ! 3.1 If defined, surface data in vector is mapped to gridded data for checking. -#define SrfdataDiag +#undef SrfdataDiag ! 4. If defined, MPI parallelization is enabled. #define USEMPI @@ -35,20 +35,16 @@ #define vanGenuchten_Mualem_SOIL_MODEL ! 5.2 If defined, lateral flow is modeled. #define CatchLateralFlow -#define UnstrLateralFlow ! Conflicts : #ifndef CATCHMENT #undef CatchLateralFlow #endif -#ifndef UNSTRUCTURED -#undef UnstrLateralFlow -#endif ! 6. If defined, CaMa-Flood model will be used. #undef CaMa_Flood ! 7. If defined, BGC model is used. -#undef BGC +#undef BGC ! Conflicts : only used when LULC_IGBP_PFT is defined. #ifndef LULC_IGBP_PFT diff --git a/main/MOD_Const_PFT.F90 b/main/MOD_Const_PFT.F90 old mode 100755 new mode 100644 index dc1cff26..73003391 --- a/main/MOD_Const_PFT.F90 +++ b/main/MOD_Const_PFT.F90 @@ -375,7 +375,7 @@ MODULE MOD_Const_PFT , 57.0, 57.0, 57.0, 57.0, 57.0, 57.0, 57.0, 57.0& , 57.0, 57.0, 57.0, 57.0, 57.0, 57.0, 57.0 & #endif - /) * 0.3 * 1.e-6 + /) * 1.e-6 ! 
quantum efficiency real(r8), parameter :: effcon_p(0:N_PFT+N_CFT-1) & diff --git a/main/MOD_CropReadin.F90 b/main/MOD_CropReadin.F90 old mode 100755 new mode 100644 index 5516e503..4ca4f395 --- a/main/MOD_CropReadin.F90 +++ b/main/MOD_CropReadin.F90 @@ -42,12 +42,12 @@ SUBROUTINE CROP_readin () character(len=256) :: file_crop type(grid_type) :: grid_crop - type(block_data_real8_2d) :: f_xy_crop + type(block_data_real8_2d) :: f_xy_crop type(spatial_mapping_type) :: mg2patch_crop type(spatial_mapping_type) :: mg2pft_crop character(len=256) :: file_irrig type(grid_type) :: grid_irrig - type(block_data_int32_2d) :: f_xy_irrig + type(block_data_int32_2d) :: f_xy_irrig type(spatial_mapping_type) :: mg2pft_irrig real(r8),allocatable :: pdrice2_tmp (:) diff --git a/main/MOD_Forcing.F90 b/main/MOD_Forcing.F90 index 384ae09e..4b8ab955 100644 --- a/main/MOD_Forcing.F90 +++ b/main/MOD_Forcing.F90 @@ -51,10 +51,7 @@ MODULE MOD_Forcing type(pointer_real8_1d), allocatable :: forc_prc_grid (:) type(pointer_real8_1d), allocatable :: forc_prl_grid (:) type(pointer_real8_1d), allocatable :: forc_lwrad_grid(:) - type(pointer_real8_1d), allocatable :: forc_swrad_grid(:) type(pointer_real8_1d), allocatable :: forc_hgt_grid (:) - type(pointer_real8_1d), allocatable :: forc_us_grid (:) - type(pointer_real8_1d), allocatable :: forc_vs_grid (:) type(pointer_real8_1d), allocatable :: forc_t_part (:) type(pointer_real8_1d), allocatable :: forc_th_part (:) @@ -64,9 +61,6 @@ MODULE MOD_Forcing type(pointer_real8_1d), allocatable :: forc_prc_part (:) type(pointer_real8_1d), allocatable :: forc_prl_part (:) type(pointer_real8_1d), allocatable :: forc_frl_part (:) - type(pointer_real8_1d), allocatable :: forc_swrad_part (:) - type(pointer_real8_1d), allocatable :: forc_us_part (:) - type(pointer_real8_1d), allocatable :: forc_vs_part (:) logical, allocatable :: glacierss (:) @@ -134,9 +128,6 @@ SUBROUTINE forcing_init (dir_forcing, deltatime, ststamp, lc_year, etstamp) real(r8) :: missing_value integer :: ielm, istt, iend - real(r8), allocatable :: vecone(:) - integer :: iblkme, xblk, yblk, xloc, yloc - CALL init_user_specified_forcing ! 
CO2 data initialization @@ -246,10 +237,7 @@ SUBROUTINE forcing_init (dir_forcing, deltatime, ststamp, lc_year, etstamp) CALL mg2p_forc%allocate_part (forc_prc_grid ) CALL mg2p_forc%allocate_part (forc_prl_grid ) CALL mg2p_forc%allocate_part (forc_lwrad_grid ) - CALL mg2p_forc%allocate_part (forc_swrad_grid ) CALL mg2p_forc%allocate_part (forc_hgt_grid ) - CALL mg2p_forc%allocate_part (forc_us_grid ) - CALL mg2p_forc%allocate_part (forc_vs_grid ) CALL mg2p_forc%allocate_part (forc_t_part ) CALL mg2p_forc%allocate_part (forc_th_part ) @@ -259,9 +247,6 @@ SUBROUTINE forcing_init (dir_forcing, deltatime, ststamp, lc_year, etstamp) CALL mg2p_forc%allocate_part (forc_prc_part ) CALL mg2p_forc%allocate_part (forc_prl_part ) CALL mg2p_forc%allocate_part (forc_frl_part ) - CALL mg2p_forc%allocate_part (forc_swrad_part ) - CALL mg2p_forc%allocate_part (forc_us_part ) - CALL mg2p_forc%allocate_part (forc_vs_part ) CALL mg2p_forc%grid2part (topo_grid, forc_topo_grid ) CALL mg2p_forc%grid2part (maxelv_grid, forc_maxelv_grid) @@ -341,7 +326,6 @@ SUBROUTINE forcing_final () deallocate (forc_prc_grid ) deallocate (forc_prl_grid ) deallocate (forc_lwrad_grid ) - deallocate (forc_swrad_grid ) deallocate (forc_hgt_grid ) deallocate (forc_t_part ) @@ -352,7 +336,6 @@ SUBROUTINE forcing_final () deallocate (forc_prc_part ) deallocate (forc_prl_part ) deallocate (forc_frl_part ) - deallocate (forc_swrad_part ) ENDIF ENDIF @@ -372,12 +355,11 @@ END SUBROUTINE forcing_reset !-------------------------------- SUBROUTINE read_forcing (idate, dir_forcing) - USE MOD_OrbCosazi + USE MOD_Precision USE MOD_Namelist USE MOD_Const_Physical, only: rgas, grav USE MOD_Vars_TimeInvariants - USE MOD_Vars_TimeVariables, only: alb USE MOD_Vars_1DForcing USE MOD_Vars_2DForcing USE MOD_Block @@ -387,154 +369,146 @@ SUBROUTINE read_forcing (idate, dir_forcing) USE MOD_LandPatch USE MOD_RangeCheck USE MOD_UserSpecifiedForcing - USE MOD_ForcingDownscaling, only : rair, cpair, downscale_forcings - USE MOD_NetCDFVector + USE MOD_ForcingDownscaling, only : rair, cpair, downscale_forcings_1c IMPLICIT NONE - integer, intent(in) :: idate(3) character(len=*), intent(in) :: dir_forcing ! local variables: - integer :: ivar, istt, iend, id(3) + integer :: ivar, istt, iend integer :: iblkme, ib, jb, i, j, ilon, ilat, np, ipart, ne - real(r8) :: calday ! Julian cal day (1.xx to 365.xx) + real(r8) :: calday ! Julian cal day (1.xx to 365.xx) real(r8) :: sunang, cloud, difrat, vnrat real(r8) :: a, hsolar, ratio_rvrf type(block_data_real8_2d) :: forc_xy_solarin - integer :: ii - character(10) :: cyear = "2005" - character(256):: lndname - + type(timestamp) :: mtstamp + integer :: id(3) integer :: dtLB, dtUB - real(r8) :: cosz, coszen(numpatch), cosa, cosazi(numpatch), balb - INTEGER :: year, month, mday + real(r8) :: cosz + integer :: year, month, mday logical :: has_u,has_v + real solar, frl, prcp, tm, us, vs, pres, qm real(r8) :: pco2m - ! ! diagnostic variables - real(r8) :: sm_azi_zen (1:numpatch) ! aggregated shadow mask on patch - real(r8) :: illumination_patch (1:numpatch) ! illumination angle (cos) - real(r8) :: beam_swrad (1:numpatch) ! beam shortwave radiation (W/m**2) - real(r8) :: diffuse_swrad (1:numpatch) ! diffuse shortwave radiation (W/m**2) - real(r8) :: reflect_swrad (1:numpatch) ! reflect shortwave radiation (W/m**2) - real(r8) :: alb_wsa (1:numpatch) ! white sky albedo, use model caculate diffuse albdeo - real(r8) :: alb_bsa (1:numpatch) ! 
black sky albedo, use model caculate direct albedo - - - IF (p_is_io) THEN - !------------------------------------------------------------ - ! READ in THE ATMOSPHERIC FORCING - ! read lower and upper boundary forcing data - CALL metreadLBUB(idate, dir_forcing) - ! set model time stamp - id(:) = idate(:) - !CALL adj2end(id) - mtstamp = id - has_u = .true. - has_v = .true. - ! loop for variables - DO ivar = 1, NVAR - IF (ivar == 5 .and. trim(vname(ivar)) == 'NULL') has_u = .false. - IF (ivar == 6 .and. trim(vname(ivar)) == 'NULL') has_v = .false. - IF (trim(vname(ivar)) == 'NULL') CYCLE ! no data, CYCLE - IF (trim(tintalgo(ivar)) == 'NULL') CYCLE - - ! to make sure the forcing data calculated is in the range of time - ! interval [LB, UB] - IF ( (mtstamp < tstamp_LB(ivar)) .or. (tstamp_UB(ivar) < mtstamp) ) THEN - write(6, *) "the data required is out of range! STOP!"; CALL CoLM_stop() - ENDIF + IF (p_is_io) THEN - ! calcualte distance to lower/upper boundary - dtLB = mtstamp - tstamp_LB(ivar) - dtUB = tstamp_UB(ivar) - mtstamp + !------------------------------------------------------------ + ! READ in THE ATMOSPHERIC FORCING - ! nearest method, for precipitation - IF (tintalgo(ivar) == 'nearest') THEN - IF (dtLB <= dtUB) THEN - CALL block_data_copy (forcn_LB(ivar), forcn(ivar)) - ELSE - CALL block_data_copy (forcn_UB(ivar), forcn(ivar)) - ENDIF - ENDIF + ! read lower and upper boundary forcing data + CALL metreadLBUB(idate, dir_forcing) - ! linear method, for T, Pres, Q, W, LW - IF (tintalgo(ivar) == 'linear') THEN - IF ( (dtLB+dtUB) > 0 ) THEN - CALL block_data_linear_interp ( & - forcn_LB(ivar), real(dtUB,r8)/real(dtLB+dtUB,r8), & - forcn_UB(ivar), real(dtLB,r8)/real(dtLB+dtUB,r8), & - forcn(ivar)) - ELSE - CALL block_data_copy (forcn_LB(ivar), forcn(ivar)) - ENDIF - ENDIF + ! set model time stamp + id(:) = idate(:) + !CALL adj2end(id) + mtstamp = id - ! coszen method, for SW - IF (tintalgo(ivar) == 'coszen') THEN - DO iblkme = 1, gblock%nblkme - ib = gblock%xblkme(iblkme) - jb = gblock%yblkme(iblkme) + has_u = .true. + has_v = .true. + ! loop for variables + DO ivar = 1, NVAR - DO j = 1, gforc%ycnt(jb) - DO i = 1, gforc%xcnt(ib) + IF (ivar == 5 .and. trim(vname(ivar)) == 'NULL') has_u = .false. + IF (ivar == 6 .and. trim(vname(ivar)) == 'NULL') has_v = .false. + IF (trim(vname(ivar)) == 'NULL') CYCLE ! no data, CYCLE + IF (trim(tintalgo(ivar)) == 'NULL') CYCLE - ilat = gforc%ydsp(jb) + j - ilon = gforc%xdsp(ib) + i - IF (ilon > gforc%nlon) ilon = ilon - gforc%nlon + ! to make sure the forcing data calculated is in the range of time + ! interval [LB, UB] + IF ( (mtstamp < tstamp_LB(ivar)) .or. (tstamp_UB(ivar) < mtstamp) ) THEN + write(6, *) "the data required is out of range! STOP!"; CALL CoLM_stop() + ENDIF - calday = calendarday(mtstamp) - cosz = orb_coszen(calday, gforc%rlon(ilon), gforc%rlat(ilat)) - cosz = max(0.001, cosz) - forcn(ivar)%blk(ib,jb)%val(i,j) = & - cosz / avgcos%blk(ib,jb)%val(i,j) * forcn_LB(ivar)%blk(ib,jb)%val(i,j) + ! calcualte distance to lower/upper boundary + dtLB = mtstamp - tstamp_LB(ivar) + dtUB = tstamp_UB(ivar) - mtstamp + + ! nearest method, for precipitation + IF (tintalgo(ivar) == 'nearest') THEN + IF (dtLB <= dtUB) THEN + CALL block_data_copy (forcn_LB(ivar), forcn(ivar)) + ELSE + CALL block_data_copy (forcn_UB(ivar), forcn(ivar)) + ENDIF + ENDIF + ! 
linear method, for T, Pres, Q, W, LW + IF (tintalgo(ivar) == 'linear') THEN + IF ( (dtLB+dtUB) > 0 ) THEN + CALL block_data_linear_interp ( & + forcn_LB(ivar), real(dtUB,r8)/real(dtLB+dtUB,r8), & + forcn_UB(ivar), real(dtLB,r8)/real(dtLB+dtUB,r8), & + forcn(ivar)) + ELSE + CALL block_data_copy (forcn_LB(ivar), forcn(ivar)) + ENDIF + ENDIF + + ! coszen method, for SW + IF (tintalgo(ivar) == 'coszen') THEN + DO iblkme = 1, gblock%nblkme + ib = gblock%xblkme(iblkme) + jb = gblock%yblkme(iblkme) + + DO j = 1, gforc%ycnt(jb) + DO i = 1, gforc%xcnt(ib) + + ilat = gforc%ydsp(jb) + j + ilon = gforc%xdsp(ib) + i + IF (ilon > gforc%nlon) ilon = ilon - gforc%nlon + + calday = calendarday(mtstamp) + cosz = orb_coszen(calday, gforc%rlon(ilon), gforc%rlat(ilat)) + cosz = max(0.001, cosz) + forcn(ivar)%blk(ib,jb)%val(i,j) = & + cosz / avgcos%blk(ib,jb)%val(i,j) * forcn_LB(ivar)%blk(ib,jb)%val(i,j) + + ENDDO + ENDDO ENDDO - ENDDO + ENDIF + ENDDO - ENDIF - ENDDO - - ! preprocess for forcing data, only for QIAN data right now? - CALL metpreprocess (gforc, forcn) - - CALL allocate_block_data (gforc, forc_xy_solarin) - - CALL block_data_copy (forcn(1), forc_xy_t ) - CALL block_data_copy (forcn(2), forc_xy_q ) - CALL block_data_copy (forcn(3), forc_xy_psrf ) - CALL block_data_copy (forcn(3), forc_xy_pbot ) - CALL block_data_copy (forcn(4), forc_xy_prl, sca = 2/3._r8) - CALL block_data_copy (forcn(4), forc_xy_prc, sca = 1/3._r8) - CALL block_data_copy (forcn(7), forc_xy_solarin) - CALL block_data_copy (forcn(8), forc_xy_frl ) - IF (DEF_USE_CBL_HEIGHT) THEN - CALL block_data_copy (forcn(9), forc_xy_hpbl ) - ENDIF - - IF (has_u .and. has_v) THEN - CALL block_data_copy (forcn(5), forc_xy_us ) - CALL block_data_copy (forcn(6), forc_xy_vs ) - ELSEIF (has_u) THEN - CALL block_data_copy (forcn(5), forc_xy_us , sca = 1/sqrt(2.0_r8)) - CALL block_data_copy (forcn(5), forc_xy_vs , sca = 1/sqrt(2.0_r8)) - ELSEIF (has_v) THEN - CALL block_data_copy (forcn(6), forc_xy_us , sca = 1/sqrt(2.0_r8)) - CALL block_data_copy (forcn(6), forc_xy_vs , sca = 1/sqrt(2.0_r8)) - ELSE - IF (.not.trim(DEF_forcing%dataset) == 'CPL7') THEN - write(6, *) "At least one of the wind components must be provided! STOP!"; - CALL CoLM_stop() - ENDIF - ENDIF + ! preprocess for forcing data, only for QIAN data right now? + CALL metpreprocess (gforc, forcn) - CALL flush_block_data (forc_xy_hgt_u, real(HEIGHT_V,r8)) - CALL flush_block_data (forc_xy_hgt_t, real(HEIGHT_T,r8)) - CALL flush_block_data (forc_xy_hgt_q, real(HEIGHT_Q,r8)) + CALL allocate_block_data (gforc, forc_xy_solarin) + + CALL block_data_copy (forcn(1), forc_xy_t ) + CALL block_data_copy (forcn(2), forc_xy_q ) + CALL block_data_copy (forcn(3), forc_xy_psrf ) + CALL block_data_copy (forcn(3), forc_xy_pbot ) + CALL block_data_copy (forcn(4), forc_xy_prl, sca = 2/3._r8) + CALL block_data_copy (forcn(4), forc_xy_prc, sca = 1/3._r8) + CALL block_data_copy (forcn(7), forc_xy_solarin) + CALL block_data_copy (forcn(8), forc_xy_frl ) + IF (DEF_USE_CBL_HEIGHT) THEN + CALL block_data_copy (forcn(9), forc_xy_hpbl ) + ENDIF + + IF (has_u .and. 
has_v) THEN + CALL block_data_copy (forcn(5), forc_xy_us ) + CALL block_data_copy (forcn(6), forc_xy_vs ) + ELSEif (has_u) THEN + CALL block_data_copy (forcn(5), forc_xy_us , sca = 1/sqrt(2.0_r8)) + CALL block_data_copy (forcn(5), forc_xy_vs , sca = 1/sqrt(2.0_r8)) + ELSEif (has_v) THEN + CALL block_data_copy (forcn(6), forc_xy_us , sca = 1/sqrt(2.0_r8)) + CALL block_data_copy (forcn(6), forc_xy_vs , sca = 1/sqrt(2.0_r8)) + ELSE + IF (.not.trim(DEF_forcing%dataset) == 'CPL7') THEN + write(6, *) "At least one of the wind components must be provided! STOP!"; + CALL CoLM_stop() + ENDIF + ENDIF + + CALL flush_block_data (forc_xy_hgt_u, real(HEIGHT_V,r8)) + CALL flush_block_data (forc_xy_hgt_t, real(HEIGHT_T,r8)) + CALL flush_block_data (forc_xy_hgt_q, real(HEIGHT_Q,r8)) IF (solarin_all_band) THEN @@ -571,48 +545,49 @@ SUBROUTINE read_forcing (idate, dir_forcing) ENDDO ELSE - !--------------------------------------------------------------- - ! as the downward solar is in full band, an empirical expression - ! will be used to divide fractions of band and incident - ! (visible, near-infrad, dirct, diffuse) - ! Julian calday (1.xx to 365.xx) - !--------------------------------------------------------------- - DO iblkme = 1, gblock%nblkme - ib = gblock%xblkme(iblkme) - jb = gblock%yblkme(iblkme) - - DO j = 1, gforc%ycnt(jb) - DO i = 1, gforc%xcnt(ib) - - ilat = gforc%ydsp(jb) + j - ilon = gforc%xdsp(ib) + i - IF (ilon > gforc%nlon) ilon = ilon - gforc%nlon - - a = forc_xy_solarin%blk(ib,jb)%val(i,j) - calday = calendarday(idate) - sunang = orb_coszen (calday, gforc%rlon(ilon), gforc%rlat(ilat)) - - cloud = (1160.*sunang-a)/(963.*sunang) - cloud = max(cloud,0.) - cloud = min(cloud,1.) - cloud = max(0.58,cloud) - - difrat = 0.0604/(sunang-0.0223)+0.0683 - IF(difrat.lt.0.) difrat = 0. - IF(difrat.gt.1.) difrat = 1. - - difrat = difrat+(1.0-difrat)*cloud - vnrat = (580.-cloud*464.)/((580.-cloud*499.)+(580.-cloud*464.)) - - forc_xy_sols %blk(ib,jb)%val(i,j) = a*(1.0-difrat)*vnrat - forc_xy_soll %blk(ib,jb)%val(i,j) = a*(1.0-difrat)*(1.0-vnrat) - forc_xy_solsd%blk(ib,jb)%val(i,j) = a*difrat*vnrat - forc_xy_solld%blk(ib,jb)%val(i,j) = a*difrat*(1.0-vnrat) - ENDDO - ENDDO - ENDDO - ENDIF - ENDIF + !--------------------------------------------------------------- + ! as the downward solar is in full band, an empirical expression + ! will be used to divide fractions of band and incident + ! (visible, near-infrad, dirct, diffuse) + ! Julian calday (1.xx to 365.xx) + !--------------------------------------------------------------- + DO iblkme = 1, gblock%nblkme + ib = gblock%xblkme(iblkme) + jb = gblock%yblkme(iblkme) + + DO j = 1, gforc%ycnt(jb) + DO i = 1, gforc%xcnt(ib) + + ilat = gforc%ydsp(jb) + j + ilon = gforc%xdsp(ib) + i + IF (ilon > gforc%nlon) ilon = ilon - gforc%nlon + + a = forc_xy_solarin%blk(ib,jb)%val(i,j) + calday = calendarday(idate) + sunang = orb_coszen (calday, gforc%rlon(ilon), gforc%rlat(ilat)) + + cloud = (1160.*sunang-a)/(963.*sunang) + cloud = max(cloud,0.) + cloud = min(cloud,1.) + cloud = max(0.58,cloud) + + difrat = 0.0604/(sunang-0.0223)+0.0683 + IF(difrat.lt.0.) difrat = 0. + IF(difrat.gt.1.) difrat = 1. 
+ + difrat = difrat+(1.0-difrat)*cloud + vnrat = (580.-cloud*464.)/((580.-cloud*499.)+(580.-cloud*464.)) + + forc_xy_sols %blk(ib,jb)%val(i,j) = a*(1.0-difrat)*vnrat + forc_xy_soll %blk(ib,jb)%val(i,j) = a*(1.0-difrat)*(1.0-vnrat) + forc_xy_solsd%blk(ib,jb)%val(i,j) = a*difrat*vnrat + forc_xy_solld%blk(ib,jb)%val(i,j) = a*difrat*(1.0-vnrat) + ENDDO + ENDDO + ENDDO + ENDIF + + ENDIF ! [GET ATMOSPHERE CO2 CONCENTRATION DATA] year = idate(1) @@ -623,6 +598,7 @@ SUBROUTINE read_forcing (idate, dir_forcing) ENDIF + IF (.not. DEF_USE_Forcing_Downscaling) THEN ! Mapping the 2d atmospheric fields [lon_points]x[lat_points] @@ -677,55 +653,41 @@ SUBROUTINE read_forcing (idate, dir_forcing) ENDIF ELSE - ! ------------------------------------------------------ - ! Forcing downscaling module - ! ------------------------------------------------------ - ! init forcing on patches - CALL mg2p_forc%grid2pset (forc_xy_pco2m, forc_pco2m) - CALL mg2p_forc%grid2pset (forc_xy_po2m , forc_po2m ) - CALL mg2p_forc%grid2pset (forc_xy_us , forc_us ) - CALL mg2p_forc%grid2pset (forc_xy_vs , forc_vs ) - CALL mg2p_forc%grid2pset (forc_xy_psrf , forc_psrf ) - CALL mg2p_forc%grid2pset (forc_xy_sols , forc_sols ) - CALL mg2p_forc%grid2pset (forc_xy_soll , forc_soll ) - CALL mg2p_forc%grid2pset (forc_xy_solsd, forc_solsd) - CALL mg2p_forc%grid2pset (forc_xy_solld, forc_solld) - CALL mg2p_forc%grid2pset (forc_xy_solarin, forc_swrad) - CALL mg2p_forc%grid2pset (forc_xy_hgt_t, forc_hgt_t) - CALL mg2p_forc%grid2pset (forc_xy_hgt_u, forc_hgt_u) - CALL mg2p_forc%grid2pset (forc_xy_hgt_q, forc_hgt_q) + + ! Mapping the 2d atmospheric fields [lon_points]x[lat_points] + ! -> the 1d vector of subgrid points [numelm] + CALL mg2p_forc%grid2pset (forc_xy_pco2m, forc_pco2m) + CALL mg2p_forc%grid2pset (forc_xy_po2m , forc_po2m ) + CALL mg2p_forc%grid2pset (forc_xy_us , forc_us ) + CALL mg2p_forc%grid2pset (forc_xy_vs , forc_vs ) + + CALL mg2p_forc%grid2pset (forc_xy_psrf , forc_psrf ) + + CALL mg2p_forc%grid2pset (forc_xy_sols , forc_sols ) + CALL mg2p_forc%grid2pset (forc_xy_soll , forc_soll ) + CALL mg2p_forc%grid2pset (forc_xy_solsd, forc_solsd) + CALL mg2p_forc%grid2pset (forc_xy_solld, forc_solld) + + CALL mg2p_forc%grid2pset (forc_xy_hgt_t, forc_hgt_t) + CALL mg2p_forc%grid2pset (forc_xy_hgt_u, forc_hgt_u) + CALL mg2p_forc%grid2pset (forc_xy_hgt_q, forc_hgt_q) IF (DEF_USE_CBL_HEIGHT) THEN CALL mg2p_forc%grid2pset (forc_xy_hpbl, forc_hpbl) ENDIF - ! Mapping the 2d atmospheric fields [lon_points]x[lat_points] - ! -> the 1d vector of subgrid points [numelm] - ! 
by selected mapping methods - CALL mg2p_forc%grid2part (forc_xy_t , forc_t_grid ) - CALL mg2p_forc%grid2part (forc_xy_q , forc_q_grid ) - CALL mg2p_forc%grid2part (forc_xy_prc , forc_prc_grid ) - CALL mg2p_forc%grid2part (forc_xy_prl , forc_prl_grid ) - CALL mg2p_forc%grid2part (forc_xy_pbot , forc_pbot_grid ) - CALL mg2p_forc%grid2part (forc_xy_frl , forc_lwrad_grid) - CALL mg2p_forc%grid2part (forc_xy_hgt_t, forc_hgt_grid ) - CALL mg2p_forc%grid2part (forc_xy_solarin, forc_swrad_grid) - CALL mg2p_forc%grid2part (forc_xy_us, forc_us_grid ) - CALL mg2p_forc%grid2part (forc_xy_vs, forc_vs_grid ) - - calday = calendarday(idate) - write(*,*) 'calday', calday + CALL mg2p_forc%grid2part (forc_xy_t , forc_t_grid ) + CALL mg2p_forc%grid2part (forc_xy_q , forc_q_grid ) + CALL mg2p_forc%grid2part (forc_xy_prc , forc_prc_grid ) + CALL mg2p_forc%grid2part (forc_xy_prl , forc_prl_grid ) + CALL mg2p_forc%grid2part (forc_xy_pbot , forc_pbot_grid ) + CALL mg2p_forc%grid2part (forc_xy_frl , forc_lwrad_grid) + CALL mg2p_forc%grid2part (forc_xy_hgt_t, forc_hgt_grid ) IF (p_is_worker) THEN - DO np = 1, numpatch ! patches - ! calculate albedo of each patches - balb = (alb(1,1,np)*forc_sols(np) & - +alb(1,2,np)*forc_solsd(np) & - +alb(2,1,np)*forc_soll(np) & - +alb(2,2,np)*forc_solld(np)) & - /(forc_sols(np)+forc_solsd(np)+forc_soll(np)+forc_solld(np)) - - DO ipart = 1, mg2p_forc%npart(np) ! part loop of each patch + + DO np = 1, numpatch + DO ipart = 1, mg2p_forc%npart(np) IF (mg2p_forc%areapart(np)%val(ipart) == 0.) CYCLE @@ -733,10 +695,10 @@ SUBROUTINE read_forcing (idate, dir_forcing) ! the ground. Scientists have measured the most frigid temperature ever ! recorded on the continent's eastern highlands: about (180K) colder than ! dry ice. - IF (forc_t_grid(np)%val(ipart) < 180.) forc_t_grid(np)%val(ipart) = 180. + IF(forc_t_grid(np)%val(ipart) < 180.) forc_t_grid(np)%val(ipart) = 180. ! the highest air temp was found in Kuwait 326 K, Sulaibya 2012-07-31; ! Pakistan, Sindh 2010-05-26; Iraq, Nasiriyah 2011-08-03 - IF (forc_t_grid(np)%val(ipart) > 326.) forc_t_grid(np)%val(ipart) = 326. + IF(forc_t_grid(np)%val(ipart) > 326.) forc_t_grid(np)%val(ipart) = 326. forc_rho_grid(np)%val(ipart) = (forc_pbot_grid(np)%val(ipart) & - 0.378*forc_q_grid(np)%val(ipart)*forc_pbot_grid(np)%val(ipart) & @@ -745,46 +707,29 @@ SUBROUTINE read_forcing (idate, dir_forcing) forc_th_grid(np)%val(ipart) = forc_t_grid(np)%val(ipart) & * (1.e5/forc_pbot_grid(np)%val(ipart)) ** (rair/cpair) - ! caculate sun zenith angle and sun azimuth angle and turn to degree - coszen(np) = orb_coszen(calday, patchlonr(np), patchlatr(np)) - cosazi(np) = orb_cosazi(calday, patchlonr(np), patchlatr(np), coszen(np)) - - ! downscale forcing from grid to part - CALL downscale_forcings ( & - glacierss(np), & - ! non-adjusted forcing + CALL downscale_forcings_1c ( glacierss(np), & + ! forcing in gridcells forc_topo_grid(np)%val(ipart), forc_maxelv_grid(np)%val(ipart), & forc_t_grid(np)%val(ipart), forc_th_grid(np)%val(ipart), & forc_q_grid(np)%val(ipart), forc_pbot_grid(np)%val(ipart), & forc_rho_grid(np)%val(ipart), forc_prc_grid(np)%val(ipart), & forc_prl_grid(np)%val(ipart), forc_lwrad_grid(np)%val(ipart), & - forc_hgt_grid(np)%val(ipart), forc_swrad_grid(np)%val(ipart), & - forc_us_grid(np)%val(ipart), forc_vs_grid(np)%val(ipart), & - - ! topography-based factor on patch - slp_type_patches(:,np), asp_type_patches(:,np), area_type_patches(:,np), & - svf_patches(np), cur_patches(np), sf_lut_patches(:,:,np), & - - ! 
other factors - calday, coszen(np), cosazi(np), balb, & - - ! adjusted forcing + forc_hgt_grid(np)%val(ipart), & + ! forcing in part of patches forc_topo(np), forc_t_part(np)%val(ipart), & forc_th_part(np)%val(ipart), forc_q_part(np)%val(ipart), & forc_pbot_part(np)%val(ipart), forc_rhoair_part(np)%val(ipart), & forc_prc_part(np)%val(ipart), forc_prl_part(np)%val(ipart), & - forc_frl_part(np)%val(ipart), forc_swrad_part(np)%val(ipart), & - forc_us_part(np)%val(ipart), forc_vs_part(np)%val(ipart)) + forc_frl_part(np)%val(ipart)) + ENDDO ENDDO - ENDIF - ! Conservation of short- and long- waves radiation in the grid of forcing - CALL mg2p_forc%normalize (forc_xy_solarin, forc_swrad_part) - CALL mg2p_forc%normalize (forc_xy_frl, forc_frl_part ) - - ! mapping parts to patches + ENDIF + + CALL mg2p_forc%normalize (forc_xy_frl, forc_frl_part) + CALL mg2p_forc%part2pset (forc_t_part, forc_t ) CALL mg2p_forc%part2pset (forc_q_part, forc_q ) CALL mg2p_forc%part2pset (forc_pbot_part, forc_pbot ) @@ -792,40 +737,8 @@ SUBROUTINE read_forcing (idate, dir_forcing) CALL mg2p_forc%part2pset (forc_prc_part, forc_prc ) CALL mg2p_forc%part2pset (forc_prl_part, forc_prl ) CALL mg2p_forc%part2pset (forc_frl_part, forc_frl ) - CALL mg2p_forc%part2pset (forc_swrad_part, forc_swrad ) - CALL mg2p_forc%part2pset (forc_us_part, forc_us ) - CALL mg2p_forc%part2pset (forc_vs_part, forc_vs ) - ! divide fractions of downscaled shortwave radiation - IF (p_is_worker) THEN - DO j = 1, numpatch - a = forc_swrad(j) - IF (isnan(a)) a = 0 - calday = calendarday(idate) - sunang = orb_coszen (calday, patchlonr(j), patchlatr(j)) - IF (sunang.eq.0) THEN - cloud = 0. - ELSE - cloud = (1160.*sunang-a)/(963.*sunang) - ENDIF - cloud = max(cloud,0.0001) - cloud = min(cloud,1.) - cloud = max(0.58,cloud) - - difrat = 0.0604/(sunang-0.0223)+0.0683 - IF(difrat.lt.0.) difrat = 0. - IF(difrat.gt.1.) difrat = 1. - - difrat = difrat+(1.0-difrat)*cloud - vnrat = (580.-cloud*464.)/((580.-cloud*499.)+(580.-cloud*464.)) - - forc_sols(j) = a*(1.0-difrat)*vnrat - forc_soll(j) = a*(1.0-difrat)*(1.0-vnrat) - forc_solsd(j) = a*difrat*vnrat - forc_solld(j) = a*difrat*(1.0-vnrat) - ENDDO - ENDIF - ENDIF + ENDIF #ifdef RangeCheck #ifdef USEMPI diff --git a/main/MOD_ForcingDownscaling.F90 b/main/MOD_ForcingDownscaling.F90 index 5a578ddb..ce37a511 100644 --- a/main/MOD_ForcingDownscaling.F90 +++ b/main/MOD_ForcingDownscaling.F90 @@ -4,7 +4,8 @@ MODULE MOD_ForcingDownscaling !----------------------------------------------------------------------------- ! DESCRIPTION: -! Downscaling meteorological forcings +! Downscaling of gridcell-level meteorological forcings into column-level forcings +! (not included wind and solar radiation) ! ! INITIAL: ! The Community Land Model version 5.0 (CLM5.0) @@ -12,23 +13,22 @@ MODULE MOD_ForcingDownscaling ! REVISIONS: ! Updated by Yongjiu Dai, January 16, 2023 ! Revised by Lu Li, January 24, 2024 -! Revised by Sisi Chen, Lu Li, June, 2024 -!----------------------------------------------------------------------------- +! USE MOD_Precision USE MOD_Qsadv USE MOD_Namelist USE MOD_Const_Physical - USE MOD_Vars_Global - IMPLICIT NONE - real(r8), parameter :: SHR_CONST_MWDAIR = 28.966_r8 ! molecular weight dry air [kg/kmole] - real(r8), parameter :: SHR_CONST_MWWV = 18.016_r8 ! molecular weight water vapor - real(r8), parameter :: SHR_CONST_AVOGAD = 6.02214e26_r8 ! Avogadro's number [molecules/kmole] - real(r8), parameter :: SHR_CONST_BOLTZ = 1.38065e-23_r8 ! 
Boltzmann's constant [J/K/molecule] + real(r8), parameter :: SHR_CONST_MWDAIR = 28.966_r8 ! molecular weight dry air [kg/kmole] + real(r8), parameter :: SHR_CONST_MWWV = 18.016_r8 ! molecular weight water vapor + real(r8), parameter :: SHR_CONST_AVOGAD = 6.02214e26_r8 ! Avogadro's number [molecules/kmole] + real(r8), parameter :: SHR_CONST_BOLTZ = 1.38065e-23_r8 ! Boltzmann's constant [J/K/molecule] real(r8), parameter :: SHR_CONST_RGAS = SHR_CONST_AVOGAD*SHR_CONST_BOLTZ ! Universal gas constant [J/K/kmole] - real(r8), parameter :: rair = SHR_CONST_RGAS/SHR_CONST_MWDAIR ! Dry air gas constant [J/K/kg] + real(r8), parameter :: SHR_CONST_RDAIR = SHR_CONST_RGAS/SHR_CONST_MWDAIR ! Dry air gas constant [J/K/kg] + + real(r8) :: rair = SHR_CONST_RDAIR ! Dry air gas constant [J/K/kg] ! On the windward side of the range, annual mean lapse rates of 3.9-5.2 (deg km-1), ! substantially smaller than the often-assumed 6.5 (deg km-1). @@ -40,20 +40,251 @@ MODULE MOD_ForcingDownscaling ! ! Kunkel, K. E., 1989: Simple procedures for extrapolation of humidity variables in the mountainous western United States. ! J. Climate, 2, 656-669. lapse_rate = /Jan - Dec/4.4,5.9,7.1,7.8,8.1,8.2,8.1,8.1,7.7,6.8,5.5,4.7/ (deg km-1) - real(r8), parameter :: lapse_rate = 0.006_r8 ! surface temperature lapse rate (deg m-1) + real(r8), parameter :: lapse_rate = 0.006_r8 ! surface temperature lapse rate (deg m-1) SAVE ! PUBLIC MEMBER FUNCTIONS: - PUBLIC :: downscale_forcings ! Downscale atmospheric forcing + PUBLIC :: downscale_forcings ! Downscale atm forcing fields from gridcell to column + PUBLIC :: downscale_forcings_1c ! Downscale atm forcing fields from gridcell to column ! PRIVATE MEMBER FUNCTIONS: PRIVATE :: rhos ! calculate atmospheric density + PRIVATE :: downscale_longwave ! Downscale longwave radiation from gridcell to column + PRIVATE :: downscale_longwave_1c ! Downscale longwave radiation from gridcell to column + !----------------------------------------------------------------------------- CONTAINS +!----------------------------------------------------------------------------- + + SUBROUTINE downscale_forcings(& + num_gridcells,num_columns,begc,endc,glaciers,wt_column,& + + !slp_c, asp_c, cur_c, svf_c, sf_c,& + + forc_topo_g ,forc_t_g ,forc_th_g ,forc_q_g ,forc_pbot_g ,& + forc_rho_g ,forc_prc_g ,forc_prl_g ,forc_lwrad_g ,forc_hgt_grc,& + !forc_us_g ,forc_vs_g ,forc_swrad_g,& + + forc_topo_c ,forc_t_c ,forc_th_c ,forc_q_c ,forc_pbot_c ,& + forc_rho_c ,forc_prc_c ,forc_prl_c ,forc_lwrad_c) + !forc_swrad_c,forc_us_c ,forc_vs_c) + +!----------------------------------------------------------------------------- +! DESCRIPTION: +! Downscale atmospheric forcing fields from gridcell to column. +! +! Downscaling is done based on the difference between each land model column's elevation and +! the atmosphere's surface elevation (which is the elevation at which the atmospheric +! forcings are valid). +! +! Note that the downscaling procedure can result in changes in grid cell mean values +! compared to what was provided by the atmosphere. We conserve fluxes of mass and +! energy, but allow states such as temperature to differ. +!----------------------------------------------------------------------------- + + IMPLICIT NONE + + ! ARGUMENTS: + integer, intent(in) :: num_gridcells ! number of gridcell (element) + integer, intent(in) :: num_columns ! number of column (patch) + integer, intent(in) :: begc (1:num_gridcells) ! beginning column index + integer, intent(in) :: endc (1:num_gridcells) ! 
ending column index + logical, intent(in) :: glaciers (1:num_columns) ! true: glacier column (itypwat = 3) + real(r8), intent(in) :: wt_column(1:num_columns) ! weight of the column relative to the grid cell + + ! topography factor + !real(r8), intent(in) :: forc_slp_c(1:num_columns) ! slope factor + !real(r8), intent(in) :: forc_asp_c(1:num_columns) ! aspect factor + !real(r8), intent(in) :: forc_cur_c(1:num_columns) ! curvature factor + !real(r8), intent(in) :: forc_svf_c(1:num_columns) ! sky view factor + !real(r8), intent(in) :: forc_sf_c(1:num_columns) ! shadow factor + + ! Gridcell-level non-downscaled fields: + real(r8), intent(in) :: forc_topo_g (1:num_gridcells) ! atmospheric surface height [m] + real(r8), intent(in) :: forc_t_g (1:num_gridcells) ! atmospheric temperature [Kelvin] + real(r8), intent(in) :: forc_th_g (1:num_gridcells) ! atmospheric potential temperature [Kelvin] + real(r8), intent(in) :: forc_q_g (1:num_gridcells) ! atmospheric specific humidity [kg/kg] + real(r8), intent(in) :: forc_pbot_g (1:num_gridcells) ! atmospheric pressure [Pa] + real(r8), intent(in) :: forc_rho_g (1:num_gridcells) ! atmospheric density [kg/m**3] + real(r8), intent(in) :: forc_prc_g (1:num_gridcells) ! convective precipitation in grid [mm/s] + real(r8), intent(in) :: forc_prl_g (1:num_gridcells) ! large-scale precipitation in grid [mm/s] + real(r8), intent(in) :: forc_lwrad_g (1:num_gridcells) ! grid downward longwave [W/m**2] + real(r8), intent(in) :: forc_hgt_grc (1:num_gridcells) ! atmospheric reference height [m] + !real(r8), intent(in) :: forc_us_g (1:num_gridcells) ! atmospheric u wind [m/s] + !real(r8), intent(in) :: forc_vs_g (1:num_gridcells) ! atmospheric v wind [m/s] + !real(r8), intent(in) :: forc_swrad_g (1:num_gridcells) ! grid downward shortwave [W/m**2] + + ! Column-level downscaled fields: + real(r8), intent(in) :: forc_topo_c (1:num_columns) ! column surface height [m] + real(r8), intent(out) :: forc_t_c (1:num_columns) ! atmospheric temperature [Kelvin] + real(r8), intent(out) :: forc_th_c (1:num_columns) ! atmospheric potential temperature [Kelvin] + real(r8), intent(out) :: forc_q_c (1:num_columns) ! atmospheric specific humidity [kg/kg] + real(r8), intent(out) :: forc_pbot_c (1:num_columns) ! atmospheric pressure [Pa] + real(r8), intent(out) :: forc_rho_c (1:num_columns) ! atmospheric density [kg/m**3] + real(r8), intent(out) :: forc_prc_c (1:num_columns) ! column convective precipitation [mm/s] + real(r8), intent(out) :: forc_prl_c (1:num_columns) ! column large-scale precipitation [mm/s] + real(r8), intent(out) :: forc_lwrad_c(1:num_columns) ! column downward longwave [W/m**2] + !real(r8), intent(out) :: forc_swrad_c(1:num_columns) ! column downward shortwave [W/m**2] + !real(r8), intent(out) :: forc_us_c (1:num_columns) ! column u wind [m/s] + !real(r8), intent(out) :: forc_vs_c (1:num_columns) ! column v wind [m/s] + + ! Local variables for topo downscaling: + integer :: g,c ! indices + + real(r8) :: hsurf_g, hsurf_c + real(r8) :: Hbot, zbot + real(r8) :: tbot_g, pbot_g, thbot_g, qbot_g, qs_g, es_g, rhos_g + real(r8) :: tbot_c, pbot_c, thbot_c, qbot_c, qs_c, es_c, rhos_c + real(r8) :: rhos_c_estimate, rhos_g_estimate + real(r8) :: dum1, dum2 + + real(r8) :: max_elev_c ! the maximum column level elevation value within the grid + real(r8) :: delta_prc_c ! deviation of the column convective precipitation from the grid level precipitation + real(r8) :: delta_prl_c ! 
deviation of the column large-scale precipitation from the grid level precipitation + + !-------------------------------------------------------------------------- + + ! Initialize column forcing (needs to be done for ALL active columns) + DO g = 1, num_gridcells + + DO c = begc(g), endc(g) + forc_t_c (c) = forc_t_g (g) + forc_th_c (c) = forc_th_g (g) + forc_q_c (c) = forc_q_g (g) + forc_pbot_c (c) = forc_pbot_g (g) + forc_rho_c (c) = forc_rho_g (g) + forc_prc_c (c) = forc_prc_g (g) + forc_prl_c (c) = forc_prl_g (g) + forc_lwrad_c(c) = forc_lwrad_g(g) + END DO + END DO + + ! Downscale forc_t, forc_th, forc_q, forc_pbot, and forc_rho to columns. + DO g = 1, num_gridcells + hsurf_g = forc_topo_g(g) ! gridcell sfc elevation + tbot_g = forc_t_g(g) ! atm sfc temp + thbot_g = forc_th_g(g) ! atm sfc pot temp + qbot_g = forc_q_g(g) ! atm sfc spec humid + pbot_g = forc_pbot_g(g) ! atm sfc pressure + rhos_g = forc_rho_g(g) ! atm density + zbot = forc_hgt_grc(g) ! atm ref height + + max_elev_c = maxval(forc_topo_c(begc(g):endc(g))) ! maximum column level elevation value within the grid + + !real(r8) :: wd(1:num_columns) ! wind direction (rad) + !real(r8) :: slp_wd(1:num_columns) ! the slope in the direction of wind + !real(r8) :: norm_slp_wd ! normalize the slope in the direction of wind + !real(r8) :: norm_cur_c ! normalize curvature factor + + ! wind adjust factor + !wd = atan(forc_vs_g(g)/forc_us_g(g)) ! cal wind direction (rad) + !slp_wd = forc_slp_c(begc(g):endc(g))*cos(wd-forc_asp_c(begc(g):endc(g))) ! the slope in the direction of wind + !norm_slp_wd = slp_wd/(2*maxval(slp_wd)) ! normalize the slope in the direction of wind + !norm_cur_c = forc_cur_c(begc(g):endc(g))/(2*maxval(forc_cur_c(begc(g):endc(g)))) ! normalize curvature factor + + DO c = begc(g), endc(g) + ! This is a simple downscaling procedure + ! Note that forc_hgt, forc_u, forc_v and solar radiation are not downscaled. + + !asp_c = forc_asp_c(c) + !cur_c = forc_cur_c(c) + + hsurf_c = forc_topo_c(c) ! column sfc elevation + tbot_c = tbot_g-lapse_rate*(hsurf_c-hsurf_g) ! adjust temp for column + Hbot = rair*0.5_r8*(tbot_g+tbot_c)/grav ! scale ht at avg temp + pbot_c = pbot_g*exp(-(hsurf_c-hsurf_g)/Hbot) ! adjust press for column + + ! Derivation of potential temperature calculation: + ! + ! The textbook definition would be: + ! thbot_c = tbot_c * (p0/pbot_c)^(rair/cpair) + ! + ! Note that pressure is related to scale height as: + ! pbot_c = p0 * exp(-zbot/Hbot) + ! + ! Plugging this in to the textbook definition, then manipulating, we get: + ! thbot_c = tbot_c * (p0/(p0*exp(-zbot/Hbot)))^(rair/cpair) + ! = tbot_c * (1/exp(-zbot/Hbot))^(rair/cpair) + ! = tbot_c * (exp(zbot/Hbot))^(rair/cpair) + ! = tbot_c * exp((zbot/Hbot) * (rair/cpair)) + + ! But we want everything expressed in delta form, resulting in: + thbot_c = thbot_g + (tbot_c - tbot_g)*exp((zbot/Hbot)*(rair/cpair)) ! adjust pot temp for column + + CALL Qsadv(tbot_g,pbot_g,es_g,dum1,qs_g,dum2) ! es, qs for gridcell + CALL Qsadv(tbot_c,pbot_c,es_c,dum1,qs_c,dum2) ! es, qs for column + qbot_c = qbot_g*(qs_c/qs_g) ! adjust q for column + + rhos_c_estimate = rhos(qbot=qbot_c, pbot=pbot_c, tbot=tbot_c) + rhos_g_estimate = rhos(qbot=qbot_g, pbot=pbot_g, tbot=tbot_g) + rhos_c = rhos_g * (rhos_c_estimate / rhos_g_estimate) ! adjust density for column + + forc_t_c(c) = tbot_c + forc_th_c(c) = thbot_c + forc_q_c(c) = qbot_c + forc_pbot_c(c) = pbot_c + forc_rho_c(c) = rhos_c + + ! adjust precipitation + IF (trim(DEF_DS_precipitation_adjust_scheme) == 'I') THEN + ! 
Tesfa et al, 2020: Exploring Topography-Based Methods for Downscaling + ! Subgrid Precipitation for Use in Earth System Models. Equation (5) + ! https://doi.org/ 10.1029/2019JD031456 + + delta_prc_c = forc_prc_g(g) * (forc_topo_c(c) - forc_topo_g(g)) / max_elev_c + forc_prc_c(c) = forc_prc_g(g) + delta_prc_c ! convective precipitation [mm/s] + + delta_prl_c = forc_prl_g(g) * (forc_topo_c(c) - forc_topo_g(g)) / max_elev_c + forc_prl_c(c) = forc_prl_g(g) + delta_prl_c ! large scale precipitation [mm/s] + + ELSEIF (trim(DEF_DS_precipitation_adjust_scheme) == 'II') THEN + ! Liston, G. E. and Elder, K.: A meteorological distribution system + ! for high-resolution terrestrial modeling (MicroMet), J. Hydrometeorol., 7, 217-234, 2006. + ! Equation (33) and Table 1: chi range from January to December: + ! [0.35,0.35,0.35,0.30,0.25,0.20,0.20,0.20,0.20,0.25,0.30,0.35] (1/m) + + delta_prc_c = forc_prc_g(g) * 2.0*0.27e-3*(forc_topo_c(c) - forc_topo_g(g)) & + /(1.0 - 0.27e-3*(forc_topo_c(c) - forc_topo_g(g))) + forc_prc_c(c) = forc_prc_g(g) + delta_prc_c ! large scale precipitation [mm/s] + + delta_prl_c = forc_prl_g(g) * 2.0*0.27e-3*(forc_topo_c(c) - forc_topo_g(g)) & + /(1.0 - 0.27e-3*(forc_topo_c(c) - forc_topo_g(g))) + forc_prl_c(c) = forc_prl_g(g) + delta_prl_c ! large scale precipitation [mm/s] + + ELSEIF (trim(DEF_DS_precipitation_adjust_scheme) == 'III') THEN + ! Mei, Y., Maggioni, V., Houser, P., Xue, Y., & Rouf, T. (2020). A nonparametric statistical + ! technique for spatial downscaling of precipitation over High Mountain Asia. Water Resources Research, + ! 56, e2020WR027472. https://doi.org/ 10.1029/2020WR027472 + ! Change Random forest model to AutoML model. + !TODO: Lu Li; Need to done after all other forcings are downscaled + END IF + + IF (forc_prl_c(c) < 0) THEN + write(*,*) 'negative prl', forc_prl_g(g), max_elev_c, forc_topo_c(c), forc_topo_g(g) + END IF + + IF (forc_prc_c(c) < 0) THEN + write(*,*) 'negative prc', forc_prc_g(g), max_elev_c, forc_topo_c(c), forc_topo_g(g) + END IF + + forc_prc_c(c) = max(forc_prc_c(c), 0.) + forc_prl_c(c) = max(forc_prl_c(c), 0.) + + END DO + END DO + + CALL downscale_longwave(num_gridcells, num_columns, begc, endc, glaciers, wt_column, & + forc_topo_g, forc_t_g, forc_q_g, forc_pbot_g, forc_lwrad_g, & + forc_topo_c, forc_t_c, forc_q_c, forc_pbot_c, forc_lwrad_c) + + END SUBROUTINE downscale_forcings + + + !----------------------------------------------------------------------------- PURE FUNCTION rhos(qbot, pbot, tbot) @@ -66,7 +297,7 @@ PURE FUNCTION rhos(qbot, pbot, tbot) IMPLICIT NONE ! ARGUMENTS: - real(r8) :: rhos ! function result: atmospheric density (kg/m**3) + real(r8) :: rhos ! function result: atmospheric density (kg/m**3) real(r8), intent(in) :: qbot ! atmospheric specific humidity (kg/kg) real(r8), intent(in) :: pbot ! atmospheric pressure (Pa) real(r8), intent(in) :: tbot ! atmospheric temperature (K) @@ -75,6 +306,7 @@ PURE FUNCTION rhos(qbot, pbot, tbot) real(r8) :: egcm real(r8) :: wv_to_dair_weight_ratio ! 
ratio of molecular weight of water vapor to that of dry air [-] + !-------------------------------------------------------------------------- wv_to_dair_weight_ratio = SHR_CONST_MWWV/SHR_CONST_MWDAIR egcm = qbot*pbot / (wv_to_dair_weight_ratio + (1._r8 - wv_to_dair_weight_ratio)*qbot) @@ -82,30 +314,194 @@ PURE FUNCTION rhos(qbot, pbot, tbot) END FUNCTION rhos + + +!----------------------------------------------------------------------------- + + SUBROUTINE downscale_longwave(& + num_gridcells, num_columns, begc, endc, glaciers, wt_column, & + forc_topo_g, forc_t_g, forc_q_g, forc_pbot_g, forc_lwrad_g, & + forc_topo_c, forc_t_c, forc_q_c, forc_pbot_c, forc_lwrad_c) + +!----------------------------------------------------------------------------- +! DESCRIPTION: +! Downscale longwave radiation from gridcell to column +! Must be done AFTER temperature downscaling !----------------------------------------------------------------------------- + + IMPLICIT NONE + + ! ARGUMENTS: + integer, intent(in) :: num_gridcells ! number of gridcell + integer, intent(in) :: num_columns ! number of column + integer, intent(in) :: begc (1:num_gridcells) ! beginning column index + integer, intent(in) :: endc (1:num_gridcells) ! ending column index + logical, intent(in) :: glaciers (1:num_columns) ! true: glacier column + real(r8), intent(in) :: wt_column (1:num_columns) ! weight of the column relative to the grid cell + + real(r8), intent(in) :: forc_topo_g (1:num_gridcells) ! atmospheric surface height (m) + real(r8), intent(in) :: forc_t_g (1:num_gridcells) ! atmospheric temperature [Kelvin] + real(r8), intent(in) :: forc_q_g (1:num_gridcells) ! atmospheric specific humidity [kg/kg] + real(r8), intent(in) :: forc_pbot_g (1:num_gridcells) ! atmospheric pressure [Pa] + real(r8), intent(in) :: forc_lwrad_g (1:num_gridcells) ! downward longwave (W/m**2) + real(r8), intent(in) :: forc_topo_c (1:num_columns) ! column surface height (m) + real(r8), intent(in) :: forc_t_c (1:num_columns) ! atmospheric temperature [Kelvin] + real(r8), intent(in) :: forc_q_c (1:num_columns) ! atmospheric specific humidity [kg/kg] + real(r8), intent(in) :: forc_pbot_c (1:num_columns) ! atmospheric pressure [Pa] + real(r8), intent(out) :: forc_lwrad_c(1:num_columns) ! downward longwave (W/m**2) + + ! LOCAL VARIABLES: + integer :: c,g ! indices + real(r8) :: hsurf_c ! column-level elevation (m) + real(r8) :: hsurf_g ! gridcell-level elevation (m) + real(r8) :: sum_lwrad_g (1:num_gridcells) ! weighted sum of column-level lwrad + real(r8) :: sum_wts_g (1:num_gridcells) ! sum of weights that contribute to sum_lwrad_g + real(r8) :: lwrad_norm_g (1:num_gridcells) ! normalization factors + real(r8) :: newsum_lwrad_g (1:num_gridcells) ! weighted sum of column-level lwrad after normalization + + real(r8) :: pv_g ! the water vapor pressure at grid cell (hPa) + real(r8) :: pv_c ! the water vapor pressure at column (hPa) + real(r8) :: emissivity_clearsky_g ! clear-sky emissivity at grid cell + real(r8) :: emissivity_clearsky_c ! clear-sky emissivity at grid column + real(r8) :: emissivity_allsky_g ! all-sky emissivity at grid cell + real(r8) :: es_g, es_c, dum1, dum2, dum3 + + real(r8), parameter :: lapse_rate_longwave = 0.032_r8 ! longwave radiation lapse rate (W m-2 m-1) + real(r8), parameter :: longwave_downscaling_limit = 0.5_r8 ! relative limit for how much longwave downscaling can be done (unitless) + + !-------------------------------------------------------------------------- + + ! 
Initialize column forcing (needs to be done for ALL active columns) + DO g = 1, num_gridcells + DO c = begc(g), endc(g) + forc_lwrad_c(c) = forc_lwrad_g(g) + END DO + END DO + + ! Downscale the longwave radiation, conserving energy + + ! Initialize variables related to normalization + DO g = 1, num_gridcells + sum_lwrad_g(g) = 0._r8 + sum_wts_g(g) = 0._r8 + newsum_lwrad_g(g) = 0._r8 + END DO + + ! Do the downscaling + DO g = 1, num_gridcells + DO c = begc(g), endc(g) + + hsurf_g = forc_topo_g(g) + hsurf_c = forc_topo_c(c) + + IF (trim(DEF_DS_longwave_adjust_scheme) == 'I') THEN + ! Fiddes and Gruber, 2014, TopoSCALE v.1.0: downscaling gridded climate data in + ! complex terrain. Geosci. Model Dev., 7, 387-405. doi:10.5194/gmd-7-387-2014. + ! Equation (1) (2) (3); here, the empirical parameters x1 and x2 are different from + ! Konzelmann et al. (1994) where x1 = 0.443 and x2 = 8 (optimal for measurements on the Greenland ice sheet) + + CALL Qsadv(forc_t_g(g) ,forc_pbot_g(g) ,es_g,dum1,dum2,dum3) + CALL Qsadv(forc_t_c(c),forc_pbot_c(c),es_c,dum1,dum2,dum3) + pv_g = forc_q_g(g) *es_g/100._r8 ! (hPa) + pv_c = forc_q_c(c)*es_c/100._r8 ! (hPa) + + emissivity_clearsky_g = 0.23_r8 + 0.43_r8*(pv_g/forc_t_g(g))**(1._r8/5.7_r8) + emissivity_clearsky_c = 0.23_r8 + 0.43_r8*(pv_c/forc_t_c(c))**(1._r8/5.7_r8) + emissivity_allsky_g = forc_lwrad_g(g) / (5.67e-8_r8*forc_t_g(g)**4) + + forc_lwrad_c(c) = (emissivity_clearsky_c + (emissivity_allsky_g - emissivity_clearsky_g)) & + * 5.67e-8_r8*forc_t_c(c)**4 + ELSE + ! Longwave radiation is downscaled by assuming a linear decrease in downwelling longwave radiation + ! with increasing elevation (0.032 W m-2 m-1, limited to 0.5 - 1.5 times the gridcell mean value, + ! then normalized to conserve gridcell total energy) (Van Tricht et al., 2016, TC) Figure 6, + ! doi:10.5194/tc-10-2379-2016 + + IF (glaciers(c)) THEN + forc_lwrad_c(c) = forc_lwrad_g(g) - lapse_rate_longwave * (hsurf_c-hsurf_g) + + ! Here we assume that deltaLW = (dLW/dT)*(dT/dz)*deltaz + ! We get dLW/dT = 4*eps*sigma*T^3 = 4*LW/T from the Stefan-Boltzmann law, + ! evaluated at the mean temp. We assume the same temperature lapse rate as above. + + ELSE + forc_lwrad_c(c) = forc_lwrad_g(g) & + - 4.0_r8 * forc_lwrad_g(g)/(0.5_r8*(forc_t_c(c)+forc_t_g(g))) & + * lapse_rate * (hsurf_c - hsurf_g) + END IF + END IF + + ! But ensure that we don't depart too far from the atmospheric forcing value: + ! negative values of lwrad are certainly bad, but small positive values might + ! also be bad. We can especially run into trouble due to the normalization: a + ! small lwrad value in one column can lead to a big normalization factor, + ! leading to huge lwrad values in other columns. + + forc_lwrad_c(c) = min(forc_lwrad_c(c), & + forc_lwrad_g(g) * (1._r8 + longwave_downscaling_limit)) + forc_lwrad_c(c) = max(forc_lwrad_c(c), & + forc_lwrad_g(g) * (1._r8 - longwave_downscaling_limit)) + + ! Keep track of the gridcell-level weighted sum for later normalization. + ! This gridcell-level weighted sum just includes points for which we do the + ! downscaling (e.g., glc_mec points). Thus the contributing weights + ! generally do not add to 1. So to do the normalization properly, we also + ! need to keep track of the weights that have contributed to this sum. + + sum_lwrad_g(g) = sum_lwrad_g(g) + wt_column(c)*forc_lwrad_c(c) + sum_wts_g(g) = sum_wts_g(g) + wt_column(c) + END DO + + ! 
Normalize forc_lwrad_c(c) to conserve energy + IF (sum_wts_g(g) == 0._r8) THEN + lwrad_norm_g(g) = 1.0_r8 + ELSE IF (sum_lwrad_g(g) == 0._r8) THEN + lwrad_norm_g(g) = 1.0_r8 + ELSE ! The standard case + lwrad_norm_g(g) = forc_lwrad_g(g) / (sum_lwrad_g(g) / sum_wts_g(g)) + END IF + + DO c = begc(g), endc(g) + forc_lwrad_c(c) = forc_lwrad_c(c) * lwrad_norm_g(g) + newsum_lwrad_g(g) = newsum_lwrad_g(g) + wt_column(c)*forc_lwrad_c(c) + END DO + + END DO + + ! Make sure that, after normalization, the grid cell mean is conserved + DO g = 1, num_gridcells + IF (sum_wts_g(g) > 0._r8) THEN + IF (abs((newsum_lwrad_g(g) / sum_wts_g(g)) - forc_lwrad_g(g)) > 1.e-8_r8) THEN + write(6,*) 'g, newsum_lwrad_g, sum_wts_g, forc_lwrad_g: ', & + g, newsum_lwrad_g(g), sum_wts_g(g), forc_lwrad_g(g) + CALL abort + END IF + END IF + END DO + + END SUBROUTINE downscale_longwave + + + + !----------------------------------------------------------------------------- - SUBROUTINE downscale_forcings (& - glaciers, & + SUBROUTINE downscale_forcings_1c (& + glaciers, & - ! non-adjusted forcing - forc_topo_g ,forc_maxelv_g ,forc_t_g ,forc_th_g ,forc_q_g ,& - forc_pbot_g ,forc_rho_g ,forc_prc_g ,forc_prl_g ,forc_lwrad_g ,& - forc_hgt_g ,forc_swrad_g ,forc_us_g ,forc_vs_g , & + !slp_c, asp_c, cur_c, svf_c, sf_c,& - ! topography-based factor on patch - slp_type_c, asp_type_c, area_type_c, svf_c, cur_c, sf_lut_c, & + forc_topo_g ,forc_maxelv_g ,forc_t_g ,forc_th_g ,forc_q_g ,& + forc_pbot_g ,forc_rho_g ,forc_prc_g ,forc_prl_g ,forc_lwrad_g ,& + forc_hgt_grc,& + !forc_us_g ,forc_vs_g ,forc_swrad_g,& - ! other factors - julian_day, coszen, cosazi, alb, & - - ! adjusted forcing - forc_topo_c ,forc_t_c ,forc_th_c ,forc_q_c ,forc_pbot_c ,& - forc_rho_c ,forc_prc_c ,forc_prl_c ,forc_lwrad_c, forc_swrad_c, & - forc_us_c ,forc_vs_c) + forc_topo_c ,forc_t_c ,forc_th_c ,forc_q_c ,forc_pbot_c ,& + forc_rho_c ,forc_prc_c ,forc_prl_c ,forc_lwrad_c) + !forc_swrad_c,forc_us_c ,forc_vs_c) !----------------------------------------------------------------------------- ! DESCRIPTION: -! Downscale atmospheric forcing fields. +! Downscale atmospheric forcing fields from gridcell to column. ! ! Downscaling is done based on the difference between each land model column's elevation and ! the atmosphere's surface elevation (which is the elevation at which the atmospheric @@ -118,25 +514,10 @@ SUBROUTINE downscale_forcings (& IMPLICIT NONE - integer, parameter :: S = 1370 ! solar constant (W/m**2) - real(r8), parameter :: thr = 85*PI/180 ! threshold of ?? - ! ARGUMENTS: - logical, intent(in) :: glaciers ! true: glacier column (itypwat = 3) - real(r8), intent(in) :: julian_day ! day of year - real(r8), intent(in) :: coszen ! cosine of sun zenith angle at an hour - real(r8), intent(in) :: cosazi ! cosine of sun azimuth angle at an hour - real(r8), intent(in) :: alb ! blue sky albedo - - ! topography-based factor - real(r8), intent(in) :: svf_c ! sky view factor - real(r8), intent(in) :: cur_c ! curvature - real(r8), intent(in) :: sf_lut_c (1:num_azimuth,1:num_zenith) ! look up table of shadow mask of a patch - real(r8), intent(in) :: asp_type_c (1:num_type) ! topographic aspect of each type of one patch - real(r8), intent(in) :: slp_type_c (1:num_type) ! topographic slope of each character of one patch - real(r8), intent(in) :: area_type_c(1:num_type) ! area percentage of each character of one patch - - ! non-downscaled fields: + logical, intent(in) :: glaciers ! true: glacier column (itypwat = 3) + + ! 
Gridcell-level non-downscaled fields: real(r8), intent(in) :: forc_topo_g ! atmospheric surface height [m] real(r8), intent(in) :: forc_maxelv_g ! max atmospheric surface height [m] real(r8), intent(in) :: forc_t_g ! atmospheric temperature [Kelvin] @@ -147,12 +528,9 @@ SUBROUTINE downscale_forcings (& real(r8), intent(in) :: forc_prc_g ! convective precipitation in grid [mm/s] real(r8), intent(in) :: forc_prl_g ! large-scale precipitation in grid [mm/s] real(r8), intent(in) :: forc_lwrad_g ! grid downward longwave [W/m**2] - real(r8), intent(in) :: forc_swrad_g ! grid downward shortwave [W/m**2] - real(r8), intent(in) :: forc_hgt_g ! atmospheric reference height [m] - real(r8), intent(in) :: forc_us_g ! eastward wind [m/s] - real(r8), intent(in) :: forc_vs_g ! northward wind [m/s] + real(r8), intent(in) :: forc_hgt_grc ! atmospheric reference height [m] - ! downscaled fields: + ! Column-level downscaled fields: real(r8), intent(in) :: forc_topo_c ! column surface height [m] real(r8), intent(out) :: forc_t_c ! atmospheric temperature [Kelvin] real(r8), intent(out) :: forc_th_c ! atmospheric potential temperature [Kelvin] @@ -162,38 +540,40 @@ SUBROUTINE downscale_forcings (& real(r8), intent(out) :: forc_prc_c ! column convective precipitation [mm/s] real(r8), intent(out) :: forc_prl_c ! column large-scale precipitation [mm/s] real(r8), intent(out) :: forc_lwrad_c ! column downward longwave [W/m**2] - real(r8), intent(out) :: forc_swrad_c ! column downward shortwave [W/m**2] - real(r8), intent(out) :: forc_us_c ! column eastward wind [m/s] - real(r8), intent(out) :: forc_vs_c ! column northward wind [m/s] - - ! Local variables for topo downscaling: + + ! Local variables for topo downscaling: + real(r8) :: hsurf_g, hsurf_c real(r8) :: Hbot, zbot real(r8) :: tbot_g, pbot_g, thbot_g, qbot_g, qs_g, es_g, rhos_g real(r8) :: tbot_c, pbot_c, thbot_c, qbot_c, qs_c, es_c, rhos_c real(r8) :: rhos_c_estimate, rhos_g_estimate real(r8) :: dum1, dum2 + real(r8) :: max_elev_c ! the maximum column level elevation value within the grid real(r8) :: delta_prc_c ! deviation of the column convective precipitation from the grid level precipitation real(r8) :: delta_prl_c ! deviation of the column large-scale precipitation from the grid level precipitation -!----------------------------------------------------------------------------- - ! -------------------------------------------------------------------------------------- - ! 1. adjust air temperature, potential temperature, specific humidity, pressure, density - ! -------------------------------------------------------------------------------------- + ! ! Downscale forc_t, forc_th, forc_q, forc_pbot, and forc_rho to columns. hsurf_g = forc_topo_g ! gridcell sfc elevation tbot_g = forc_t_g ! atm sfc temp thbot_g = forc_th_g ! atm sfc pot temp qbot_g = forc_q_g ! atm sfc spec humid pbot_g = forc_pbot_g ! atm sfc pressure rhos_g = forc_rho_g ! atm density - zbot = forc_hgt_g ! atm ref height + zbot = forc_hgt_grc ! atm ref height + + ! This is a simple downscaling procedure + ! Note that forc_hgt, forc_u, forc_v and solar radiation are not downscaled. - hsurf_c = forc_topo_c ! column sfc elevation - tbot_c = tbot_g-lapse_rate*(hsurf_c-hsurf_g) ! adjust [temp] for column + !asp_c = forc_asp_c(c) + !cur_c = forc_cur_c(c) + + hsurf_c = forc_topo_c ! column sfc elevation + tbot_c = tbot_g-lapse_rate*(hsurf_c-hsurf_g) ! adjust temp for column Hbot = rair*0.5_r8*(tbot_g+tbot_c)/grav ! scale ht at avg temp - pbot_c = pbot_g*exp(-(hsurf_c-hsurf_g)/Hbot) ! 
adjust [press] for column + pbot_c = pbot_g*exp(-(hsurf_c-hsurf_g)/Hbot) ! adjust press for column ! Derivation of potential temperature calculation: ! @@ -210,46 +590,23 @@ SUBROUTINE downscale_forcings (& ! = tbot_c * exp((zbot/Hbot) * (rair/cpair)) ! But we want everything expressed in delta form, resulting in: - thbot_c = thbot_g + (tbot_c - tbot_g)*exp((zbot/Hbot)*(rair/cpair)) ! adjust [pot temp] for column + thbot_c = thbot_g + (tbot_c - tbot_g)*exp((zbot/Hbot)*(rair/cpair)) ! adjust pot temp for column CALL Qsadv(tbot_g,pbot_g,es_g,dum1,qs_g,dum2) ! es, qs for gridcell CALL Qsadv(tbot_c,pbot_c,es_c,dum1,qs_c,dum2) ! es, qs for column - qbot_c = qbot_g*(qs_c/qs_g) ! adjust [q] for column + qbot_c = qbot_g*(qs_c/qs_g) ! adjust q for column rhos_c_estimate = rhos(qbot=qbot_c, pbot=pbot_c, tbot=tbot_c) rhos_g_estimate = rhos(qbot=qbot_g, pbot=pbot_g, tbot=tbot_g) - rhos_c = rhos_g * (rhos_c_estimate / rhos_g_estimate) ! adjust [density] for column + rhos_c = rhos_g * (rhos_c_estimate / rhos_g_estimate) ! adjust density for column - ! save forc_t_c = tbot_c forc_th_c = thbot_c forc_q_c = qbot_c forc_pbot_c = pbot_c forc_rho_c = rhos_c - ! -------------------------------------------------------------------------------------- - ! 2. adjust wind speed - ! -------------------------------------------------------------------------------------- - CALL downscale_wind(forc_us_g, forc_vs_g, & - forc_us_c, forc_vs_c, & - slp_type_c, asp_type_c, area_type_c, cur_c) - - ! -------------------------------------------------------------------------------------- - ! 3. adjust longwave radiation and shortwave radiation - ! -------------------------------------------------------------------------------------- - CALL downscale_longwave (glaciers, & - forc_topo_g, forc_t_g, forc_q_g, forc_pbot_g, forc_lwrad_g, & - forc_topo_c, forc_t_c, forc_q_c, forc_pbot_c, forc_lwrad_c) - - CALL downscale_shortwave(& - forc_topo_g, forc_pbot_g, forc_swrad_g, & - forc_topo_c, forc_pbot_c, forc_swrad_c, & - julian_day, coszen, cosazi, alb, & - slp_type_c, asp_type_c, svf_c, sf_lut_c, area_type_c) - - ! -------------------------------------------------------------------------------------- - ! 4. adjust precipitation - ! -------------------------------------------------------------------------------------- + ! adjust precipitation IF (trim(DEF_DS_precipitation_adjust_scheme) == 'I') THEN ! Tesfa et al, 2020: Exploring Topography-Based Methods for Downscaling ! Subgrid Precipitation for Use in Earth System Models. Equation (5) @@ -279,8 +636,8 @@ SUBROUTINE downscale_forcings (& ! Mei, Y., Maggioni, V., Houser, P., Xue, Y., & Rouf, T. (2020). A nonparametric statistical ! technique for spatial downscaling of precipitation over High Mountain Asia. Water Resources Research, ! 56, e2020WR027472. https://doi.org/ 10.1029/2020WR027472 - - ! We implement this scheme in MOD_Forcing.F90 + ! Change Random forest model to AutoML model. + !TODO: Lu Li; Need to done after all other forcings are downscaled END IF IF (forc_prl_c < 0) THEN @@ -293,81 +650,25 @@ SUBROUTINE downscale_forcings (& forc_prc_c = 0. 
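
For reference, the temperature / pressure / potential-temperature / humidity / density chain above can be condensed into a few lines. This sketch assumes standard values for rair, cpair and grav, a 0.006 K m-1 lapse rate, a Magnus-type saturation humidity in place of Qsadv, and a simple moist-air equation of state in place of the module's rhos function; none of these values are read from the patch itself.

program thermo_downscale_sketch
   implicit none
   integer,  parameter :: r8    = selected_real_kind(12)
   real(r8), parameter :: rair  = 287.04_r8    ! assumed gas constant for dry air [J kg-1 K-1]
   real(r8), parameter :: cpair = 1004.64_r8   ! assumed specific heat of dry air [J kg-1 K-1]
   real(r8), parameter :: grav  = 9.80616_r8   ! assumed gravity [m s-2]
   real(r8), parameter :: lapse = 0.006_r8     ! assumed temperature lapse rate [K m-1]
   real(r8) :: h_g, h_c, zbot, t_g, th_g, q_g, p_g, rho_g
   real(r8) :: t_c, th_c, q_c, p_c, rho_c, hbot

   ! illustrative gridcell forcing and one column 500 m above it
   h_g = 1000._r8 ; h_c = 1500._r8 ; zbot = 30._r8
   t_g = 278._r8 ; th_g = 286._r8 ; q_g = 5.e-3_r8 ; p_g = 90000._r8
   rho_g = p_g/(rair*t_g*(1._r8+0.608_r8*q_g))

   t_c  = t_g - lapse*(h_c-h_g)                           ! lapse-rate temperature adjustment
   hbot = rair*0.5_r8*(t_g+t_c)/grav                      ! scale height at the mean temperature
   p_c  = p_g*exp(-(h_c-h_g)/hbot)                        ! hypsometric pressure adjustment
   th_c = th_g + (t_c-t_g)*exp((zbot/hbot)*(rair/cpair))  ! potential temperature in delta form
   q_c  = q_g*(qsat(t_c,p_c)/qsat(t_g,p_g))               ! scale q by the saturation ratio
   rho_c = rho_g*( (p_c/(rair*t_c*(1._r8+0.608_r8*q_c))) &
                 / (p_g/(rair*t_g*(1._r8+0.608_r8*q_g))) )

   print '(a,5g13.5)', ' t, th, q, p, rho at column: ', t_c, th_c, q_c, p_c, rho_c

contains

   real(r8) function qsat (t, p)   ! Magnus-type saturation specific humidity (proxy for Qsadv)
      real(r8), intent(in) :: t, p
      real(r8) :: es
      es   = 611._r8*exp(17.27_r8*(t-273.15_r8)/(t-35.85_r8))
      qsat = 0.622_r8*es/(p-0.378_r8*es)
   end function qsat

end program thermo_downscale_sketch

The precipitation adjustment referenced above (Tesfa et al., 2020, scheme 'I') is elevation-based as well, but its formula is not visible in this hunk, so it is omitted from the sketch.
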
END IF - END SUBROUTINE downscale_forcings -!----------------------------------------------------------------------------- + CALL downscale_longwave_1c (glaciers, & + forc_topo_g, forc_t_g, forc_q_g, forc_pbot_g, forc_lwrad_g, & + forc_topo_c, forc_t_c, forc_q_c, forc_pbot_c, forc_lwrad_c) - SUBROUTINE downscale_wind(forc_us_g, forc_vs_g, & - forc_us_c, forc_vs_c, & - slp_type_c, asp_type_c, area_type_c, cur_c) + END SUBROUTINE downscale_forcings_1c -!----------------------------------------------------------------------------- -! DESCRIPTION: -! Downscale wind speed -! -! Liston, G. E. and Elder, K.: A meteorological distribution system -! for high-resolution terrestrial modeling (MicroMet), J. Hydrometeorol., 7, 217-234, 2006. -!----------------------------------------------------------------------------- - IMPLICIT NONE - - ! ARGUMENTS: - real(r8), intent(in) :: forc_us_g ! eastward wind (m/s) - real(r8), intent(in) :: forc_vs_g ! northward wind (m/s) - real(r8), intent(out) :: forc_us_c ! adjusted eastward wind (m/s) - real(r8), intent(out) :: forc_vs_c ! adjusted northward wind (m/s) - - real(r8), intent(in) :: cur_c ! curvature - real(r8), intent(in) :: asp_type_c (1:num_type) ! topographic aspect of each character of one patch - real(r8), intent(in) :: slp_type_c (1:num_type) ! topographic slope of each character of one patch - real(r8), intent(in) :: area_type_c (1:num_type) ! area percentage of each character of one patch - - ! local variables - real(r8) :: wind_dir ! wind direction - real(r8) :: ws_g ! non-downscaled wind speed - real(r8) :: wind_dir_slp (1:num_type) ! the slope in the direction of the wind - real(r8) :: ws_c_type(1:num_type) ! downscaled wind speed of each type in each patch - real(r8) :: ws_c ! downscaled wind speed - integer :: g, c, i !----------------------------------------------------------------------------- - ! calculate wind direction - IF (forc_us_g == 0.) THEN - wind_dir = 0 - ELSE - wind_dir = atan(forc_vs_g /forc_us_g) - ENDIF - - ! non-adjusted wind speed - ws_g = sqrt(forc_vs_g *forc_vs_g +forc_us_g *forc_us_g ) - - ! compute the slope in the direction of the wind - DO i = 1, num_type - wind_dir_slp(i) = slp_type_c(i)*cos(wind_dir-asp_type_c(i)*PI/180) - ENDDO - - ! compute wind speed ajustment - DO i = 1, num_type - ws_c_type(i) = ws_g *(1+(0.58*wind_dir_slp(i))+0.42*cur_c)*area_type_c(i) - ENDDO - - ! adjusted wind speed - ws_c = sum(ws_c_type(:)) - forc_us_c = ws_c*cos(wind_dir) - forc_vs_c = ws_c*sin(wind_dir) - - END SUBROUTINE downscale_wind - -!----------------------------------------------------------------------------- - - SUBROUTINE downscale_longwave (glaciers, & + SUBROUTINE downscale_longwave_1c (glaciers, & forc_topo_g, forc_t_g, forc_q_g, forc_pbot_g, forc_lwrad_g, & forc_topo_c, forc_t_c, forc_q_c, forc_pbot_c, forc_lwrad_c) !----------------------------------------------------------------------------- ! DESCRIPTION: -! Downscale longwave radiation +! Downscale longwave radiation from gridcell to column +! Must be done AFTER temperature downscaling !----------------------------------------------------------------------------- IMPLICIT NONE @@ -380,12 +681,11 @@ SUBROUTINE downscale_longwave (glaciers, & real(r8), intent(in) :: forc_q_g ! atmospheric specific humidity [kg/kg] real(r8), intent(in) :: forc_pbot_g ! atmospheric pressure [Pa] real(r8), intent(in) :: forc_lwrad_g ! downward longwave (W/m**2) - real(r8), intent(in) :: forc_topo_c ! column surface height (m) real(r8), intent(in) :: forc_t_c ! 
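
The deleted downscale_wind above follows the MicroMet adjustment of Liston and Elder (2006): the grid wind speed is scaled by 1 + 0.58*(slope in the wind direction) + 0.42*(curvature), area-weighted over the terrain types of a patch. A minimal sketch with made-up slope/aspect/area/curvature values is given below; it uses atan2 for a full-quadrant wind direction, which is a choice of the sketch rather than of the removed code.

program wind_downscale_sketch
   implicit none
   integer,  parameter :: r8 = selected_real_kind(12)
   real(r8), parameter :: pi = 3.141592653589793_r8
   integer,  parameter :: ntype = 4                  ! number of terrain types in a patch
   real(r8) :: u_g, v_g, ws_g, dir, ws_c, u_c, v_c, cur
   real(r8) :: slp(ntype), asp(ntype), area(ntype), omega(ntype)
   integer  :: i

   ! illustrative forcing and terrain description (all values made up)
   u_g = 3._r8 ; v_g = 1._r8 ; cur = 0.05_r8
   slp  = (/ 0.10_r8, 0.20_r8, 0.05_r8, 0.15_r8 /)   ! slope of each terrain type
   asp  = (/ 45._r8, 135._r8, 225._r8, 315._r8 /)    ! aspect of each type [deg]
   area = (/ 0.25_r8, 0.25_r8, 0.25_r8, 0.25_r8 /)   ! area fraction of each type

   ws_g = sqrt(u_g*u_g + v_g*v_g)
   dir  = atan2(v_g, u_g)                            ! wind direction [rad]

   ! slope in the wind direction, then the MicroMet speed adjustment per type
   ws_c = 0._r8
   do i = 1, ntype
      omega(i) = slp(i)*cos(dir - asp(i)*pi/180._r8)
      ws_c = ws_c + ws_g*(1._r8 + 0.58_r8*omega(i) + 0.42_r8*cur)*area(i)
   end do

   u_c = ws_c*cos(dir)
   v_c = ws_c*sin(dir)
   print *, 'downscaled wind components:', u_c, v_c

end program wind_downscale_sketch
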
atmospheric temperature [Kelvin] real(r8), intent(in) :: forc_q_c ! atmospheric specific humidity [kg/kg] real(r8), intent(in) :: forc_pbot_c ! atmospheric pressure [Pa] - real(r8), intent(out):: forc_lwrad_c ! downward longwave (W/m**2) + real(r8), intent(out) :: forc_lwrad_c ! downward longwave (W/m**2) ! LOCAL VARIABLES: real(r8) :: hsurf_c ! column-level elevation (m) @@ -401,10 +701,13 @@ SUBROUTINE downscale_longwave (glaciers, & real(r8), parameter :: lapse_rate_longwave = 0.032_r8 ! longwave radiation lapse rate (W m-2 m-1) real(r8), parameter :: longwave_downscaling_limit = 0.5_r8 ! relative limit for how much longwave downscaling can be done (unitless) -!-------------------------------------------------------------------------- + !-------------------------------------------------------------------------- - ! Initialize (needs to be done for ALL active columns) + ! Initialize column forcing (needs to be done for ALL active columns) forc_lwrad_c = forc_lwrad_g + + ! Do the downscaling + hsurf_g = forc_topo_g hsurf_c = forc_topo_c @@ -443,8 +746,8 @@ SUBROUTINE downscale_longwave (glaciers, & forc_lwrad_c = forc_lwrad_g & - 4.0_r8 * forc_lwrad_g/(0.5_r8*(forc_t_c+forc_t_g)) & * lapse_rate * (hsurf_c - hsurf_g) - ENDIF - ENDIF + END IF + END IF ! But ensure that we don't depart too far from the atmospheric forcing value: ! negative values of lwrad are certainly bad, but small positive values might @@ -455,185 +758,7 @@ SUBROUTINE downscale_longwave (glaciers, & forc_lwrad_c = min(forc_lwrad_c, forc_lwrad_g * (1._r8 + longwave_downscaling_limit)) forc_lwrad_c = max(forc_lwrad_c, forc_lwrad_g * (1._r8 - longwave_downscaling_limit)) - END SUBROUTINE downscale_longwave - -!----------------------------------------------------------------------------- - SUBROUTINE downscale_shortwave( & - forc_topo_g, forc_pbot_g, forc_swrad_g, & - forc_topo_c, forc_pbot_c, forc_swrad_c, & - julian_day, coszen, cosazi, alb, & - slp_type_c, asp_type_c, svf_c, sf_lut_c, area_type_c) -!----------------------------------------------------------------------------- -! DESCRIPTION: -! -! Rouf, T., Mei, Y., Maggioni, V., Houser, P., & Noonan, M. (2020). A Physically Based -! Atmospheric Variables Downscaling Technique. Journal of Hydrometeorology, -! 21(1), 93–108. https://doi.org/10.1175/JHM-D-19-0109.1 -! -! Sisi Chen, Lu Li, Yongjiu Dai, et al. Exploring Topography Downscaling Methods for -! Hyper-Resolution Land Surface Modeling. Authorea. April 25, 2024. -! DOI: 10.22541/au.171403656.68476353/v1 -! -! Must be done after downscaling of surface pressure -!----------------------------------------------------------------------------- - - IMPLICIT NONE - - integer, parameter :: S = 1370 ! solar constant (W/m**2) - real(r8), parameter :: thr = 85*PI/180 ! threshold of ?? - real(r8), parameter :: shortwave_downscaling_limit = 0.5_r8 ! relative limit for how much shortwave downscaling can be done (unitless) - - ! ARGUMENTS: - real(r8), intent(in) :: julian_day ! day of year - real(r8), intent(in) :: coszen ! zenith angle at an hour - real(r8), intent(in) :: cosazi ! azimuth angle at an hour - real(r8), intent(in) :: alb ! blue sky albedo - - real(r8), intent(in) :: forc_topo_g ! atmospheric surface height (m) - real(r8), intent(in) :: forc_pbot_g ! atmospheric pressure [Pa] - real(r8), intent(in) :: forc_swrad_g ! downward shortwave (W/m**2) - - real(r8), intent(in) :: forc_topo_c ! column surface height (m) - real(r8), intent(in) :: forc_pbot_c ! atmospheric pressure [Pa] - real(r8), intent(out):: forc_swrad_c ! 
downward shortwave (W/m**2) - - real(r8), intent(in) :: svf_c ! sky view factor - real(r8), intent(in) :: sf_lut_c (1:num_azimuth,1:num_zenith) ! look up table of shadow factor - real(r8), intent(in) :: asp_type_c (1:num_type) ! topographic aspect of each character of one patch (°) - real(r8), intent(in) :: slp_type_c (1:num_type) ! topographic slope of each character of one patch - real(r8), intent(in) :: area_type_c(1:num_type) ! area percentage of each character of one patch - - ! LOCAL VARIABLES: - real(r8) :: zen_rad, azi_rad, zen_deg, azi_deg ! rad and deg of sun zenith and azimuth angles - integer :: idx_azi, idx_zen ! index used to cal shadow factor from look up table - real(r8) :: sf_c ! shadow factor - real(r8) :: rt_R ! The ratio of the current distance between the sun and the earth ! to the average distance between the sun and the earth - real(r8) :: toa_swrad ! top of atmosphere shortwave radiation - real(r8) :: clr_idx ! atmospheric transparency - real(r8) :: diff_wgt ! diffuse weight - real(r8) :: k_c ! column broadband attenuation coefficient [Pa^-1] - real(r8) :: opt_factor ! optical length factor - real(r8) :: a_p - real(r8) :: svf, balb - - real(r8) :: diff_swrad_g, beam_swrad_g ! diffuse and beam radiation - real(r8) :: diff_swrad_c, beam_swrad_c, refl_swrad_c! downscaled diffuse and beam radiation - real(r8) :: beam_swrad_type (1:num_type) ! beam radiation of one characterized patch - real(r8) :: refl_swrad_type (1:num_type) ! reflect radiation of one characterized patch - real(r8) :: tcf_type (1:num_type) ! terrain configure factor - real(r8) :: cosill_type (1:num_type) ! illumination angle (cos) at defined types - - integer :: i - -!----------------------------------------------------------------------------- - - ! calculate shadow factor according to sun zenith and azimuth angle - zen_rad = acos(coszen) - azi_rad = acos(cosazi) - zen_deg = zen_rad*180/PI ! turn deg - azi_deg = azi_rad*180.0/PI ! turn deg - - idx_azi = INT(azi_deg*num_azimuth/360) - idx_zen = INT(zen_deg*num_zenith/90) - IF (idx_azi==0) idx_azi = 1 - IF (idx_zen==0) idx_zen = 1 - - sf_c = sf_lut_c(idx_azi, idx_zen) - IF (sf_c<0) sf_c = 0 - IF (sf_c>1) sf_c = 1 - - ! calculate top-of-atmosphere incident shortwave radiation - rt_R = 1-0.01672*cos(0.9856*(julian_day-4)) - toa_swrad = S*(rt_R**2)*coszen - - ! calculate clearness index - IF (toa_swrad.le.0) THEN - clr_idx = 1 - ELSE - clr_idx = forc_swrad_g/toa_swrad - ENDIF - IF (clr_idx>1) clr_idx = 1 - - ! calculate diffuse weight - ! Ruiz-Arias, J. A., Alsamamra, H., Tovar-Pescador, J., & Pozo-Vázquez, D. (2010). - ! Proposal of a regressive model for the hourly diffuse solar radiation under all sky - ! conditions. Energy Conversion and Management, 51(5), 881–893. - ! https://doi.org/10.1016/j.enconman.2009.11.024 - diff_wgt = 0.952-1.041*exp(-1*exp(2.3-4.702*clr_idx)) - IF (diff_wgt>1) diff_wgt = 1 - IF (diff_wgt<0) diff_wgt = 0 - - ! calculate diffuse and beam radiation - diff_swrad_g = forc_swrad_g*diff_wgt - beam_swrad_g = forc_swrad_g*(1-diff_wgt) - - ! calcualte broadband attenuation coefficient [Pa^-1] - IF (clr_idx.le.0) THEN - k_c = 0 - ELSE - k_c = log(clr_idx)/forc_pbot_c - ENDIF - - ! calculate factor to account for the difference of optical path length due to pressure difference - opt_factor = exp(k_c*(forc_pbot_g-forc_pbot_c)) - ! Control the boundary of optical path length - IF ((opt_factor>10000).or.(opt_factor<-10000)) opt_factor = 0 - - ! 
Adjust the zenith angle so that the range of zenith angles is less than 85° - IF (zen_rad>thr) zen_rad=thr - - ! loop for four defined types to downscale beam radiation - DO i = 1, num_type - ! calculate the cosine of solar illumination angle, cos(θ), - ! ranging between −1 and 1, indicates if the sun is below or - ! above the local horizon (note that values lower than 0 are set to 0 indicate self shadow) - cosill_type(i) = cos(slp_type_c(i))+tan(zen_rad)*sin(slp_type_c(i))*cos(asp_type_c(i)*PI/180) - IF (cosill_type(i)>1) cosill_type(i) = 1 - IF (cosill_type(i)<0) cosill_type(i) = 0 - - ! downscaling beam radiation - a_p = area_type_c(i) - IF (a_p.gt.1.0) a_p = 1 - IF (a_p.lt.0) a_p = 0 - beam_swrad_type(i) = sf_c*cosill_type(i)*opt_factor*a_p*beam_swrad_g - ENDDO - beam_swrad_c = sum(beam_swrad_type) - - ! downscaling diffuse radiation - svf = svf_c - IF (svf>1) svf = 1 - IF (svf<0) svf = 0 - diff_swrad_c = svf*diff_swrad_g - - ! downscaling reflected radiation - balb = alb - DO i = 1, num_type - tcf_type(i) = (1+cos(slp_type_c(i)))/2-svf - IF (tcf_type(i)<0) tcf_type(i) = 0 - - IF (isnan(alb)) THEN - refl_swrad_type(i) = -1.0e36 - ELSE - IF ((balb<0).or.(balb>1)) balb = 0 - refl_swrad_type(i) = balb*tcf_type(i)*(beam_swrad_c*coszen+(1-svf)*diff_swrad_c) - ENDIF - ENDDO - refl_swrad_c = sum(refl_swrad_type, mask = refl_swrad_type /= -1.0e36) - forc_swrad_c = beam_swrad_c+diff_swrad_c+refl_swrad_c - - ! But ensure that we don't depart too far from the atmospheric forcing value: - ! negative values of swrad are certainly bad, but small positive values might - ! also be bad. We can especially run into trouble due to the normalization: a - ! small swrad value in one column can lead to a big normalization factor, - ! leading to huge swrad values in other columns. - - forc_swrad_c = min(forc_swrad_c, & - forc_swrad_g * (1._r8 + shortwave_downscaling_limit)) - forc_swrad_c = max(forc_swrad_c, & - forc_swrad_g * (1._r8 - shortwave_downscaling_limit)) - ! for normalize - IF (forc_swrad_c==0.) forc_swrad_c = 0.0001 - END SUBROUTINE downscale_shortwave + END SUBROUTINE downscale_longwave_1c END MODULE MOD_ForcingDownscaling diff --git a/main/MOD_HistGridded.F90 b/main/MOD_HistGridded.F90 old mode 100755 new mode 100644 index 6b40edbd..f82259f6 --- a/main/MOD_HistGridded.F90 +++ b/main/MOD_HistGridded.F90 @@ -41,24 +41,26 @@ MODULE MOD_HistGridded !--------------------------------------- SUBROUTINE hist_gridded_init (dir_hist) - USE MOD_Vars_Global - USE MOD_Namelist - USE MOD_Block - USE MOD_LandPatch + USE MOD_Vars_Global + USE MOD_Namelist + USE MOD_Block + USE MOD_LandPatch #ifdef URBAN_MODEL - USE MOD_LandUrban + USE MOD_LandUrban #endif - USE MOD_Vars_1DAccFluxes - USE MOD_Forcing, only : gforc + USE MOD_Vars_1DAccFluxes + USE MOD_Forcing, only : gforc #ifdef SinglePoint - USE MOD_SingleSrfData + USE MOD_SingleSrfData #endif - USE MOD_Utils - IMPLICIT NONE + USE MOD_Utils + IMPLICIT NONE + + character(len=*), intent(in) :: dir_hist - character(len=*), intent(in) :: dir_hist - type(block_data_real8_2d) :: gridarea - integer :: iblkme, xblk, yblk, xloc, yloc, xglb, yglb + ! 
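
The removed downscale_shortwave combines several steps: top-of-atmosphere radiation and a clearness index, a regression-based diffuse fraction (Ruiz-Arias et al., 2010), a pressure-based optical-path factor, and separate beam / diffuse / terrain-reflected components using the shadow factor, the self-shadowing illumination angle, and the sky-view factor. The condensed sketch below reproduces those formulas for a single terrain type; the input values, the conversion of the eccentricity argument to radians, and treating the shadow factor as a plain number instead of the sf_lut_c look-up table are assumptions made for illustration.

program sw_downscale_sketch
   implicit none
   integer,  parameter :: r8  = selected_real_kind(12)
   real(r8), parameter :: pi  = 3.141592653589793_r8
   real(r8), parameter :: s0  = 1370._r8        ! solar constant [W m-2]
   real(r8) :: day, coszen, sw_g, p_g, p_c, alb, svf, sf, slp, asp
   real(r8) :: rt, toa, kt, fd, diff_g, beam_g, kc, opt
   real(r8) :: cosill, beam_c, diff_c, refl_c, sw_c, tcf

   ! illustrative inputs (all made up): noonish sun, moderate clearness, gentle south slope
   day = 180._r8 ; coszen = 0.8_r8 ; sw_g = 600._r8
   p_g = 90000._r8 ; p_c = 85000._r8 ; alb = 0.2_r8
   svf = 0.95_r8 ; sf = 1._r8 ; slp = 0.1_r8 ; asp = 180._r8

   rt  = 1._r8 - 0.01672_r8*cos(0.9856_r8*(day-4._r8)*pi/180._r8)  ! sun-earth distance ratio
   toa = s0*rt*rt*coszen                                           ! top-of-atmosphere shortwave
   kt  = min(sw_g/max(toa,1.e-6_r8), 1._r8)                        ! clearness index

   fd  = 0.952_r8 - 1.041_r8*exp(-exp(2.3_r8-4.702_r8*kt))         ! diffuse fraction (regression)
   fd  = min(max(fd, 0._r8), 1._r8)
   diff_g = sw_g*fd
   beam_g = sw_g*(1._r8-fd)

   kc  = log(max(kt,1.e-6_r8))/p_c                                 ! broadband attenuation [Pa-1]
   opt = exp(kc*(p_g-p_c))                                         ! optical-path-length factor

   ! beam: shadow factor * illumination angle * optical factor
   cosill = cos(slp) + tan(acos(coszen))*sin(slp)*cos(asp*pi/180._r8)
   cosill = min(max(cosill, 0._r8), 1._r8)
   beam_c = sf*cosill*opt*beam_g

   ! diffuse: scaled by the sky-view factor
   diff_c = svf*diff_g

   ! terrain-reflected: albedo * terrain-configuration factor
   tcf    = max((1._r8+cos(slp))/2._r8 - svf, 0._r8)
   refl_c = alb*tcf*(beam_c*coszen + (1._r8-svf)*diff_c)

   sw_c = beam_c + diff_c + refl_c
   ! keep within +/- 50 % of the gridcell value, as in the module
   sw_c = min(max(sw_c, 0.5_r8*sw_g), 1.5_r8*sw_g)
   print *, 'downscaled shortwave:', sw_c

end program sw_downscale_sketch

In the module the shadow factor is read from the azimuth/zenith look-up table and the beam term is area-weighted over four terrain types; the sketch collapses both to single values to keep the arithmetic visible.
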
Local Variables + type(block_data_real8_2d) :: gridarea + integer :: iblkme, xblk, yblk, xloc, yloc, xglb, yglb IF (DEF_hist_grid_as_forcing) THEN CALL ghist%define_by_copy (gforc) @@ -110,30 +112,30 @@ SUBROUTINE flux_map_and_write_2d ( & acc_vec, file_hist, varname, itime_in_file, sumarea, filter, & longname, units) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Namelist - USE MOD_DataType - USE MOD_Block - USE MOD_Grid - USE MOD_Vars_1DAccFluxes, only: nac - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - real(r8), intent(inout) :: acc_vec(:) - character(len=*), intent(in) :: file_hist - character(len=*), intent(in) :: varname - integer, intent(in) :: itime_in_file - character(len=*), intent(in) :: longname - character(len=*), intent(in) :: units - - type(block_data_real8_2d), intent(in) :: sumarea - logical, intent(in) :: filter(:) - - ! Local variables - type(block_data_real8_2d) :: flux_xy_2d - integer :: iblkme, xblk, yblk, xloc, yloc - integer :: compress + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist + USE MOD_DataType + USE MOD_Block + USE MOD_Grid + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + real(r8), intent(inout) :: acc_vec(:) + character(len=*), intent(in) :: file_hist + character(len=*), intent(in) :: varname + integer, intent(in) :: itime_in_file + character(len=*), intent(in) :: longname + character(len=*), intent(in) :: units + + type(block_data_real8_2d), intent(in) :: sumarea + logical, intent(in) :: filter(:) + + ! Local variables + type(block_data_real8_2d) :: flux_xy_2d + integer :: iblkme, xblk, yblk, xloc, yloc + integer :: compress IF (p_is_worker) WHERE (acc_vec /= spval) acc_vec = acc_vec / nac IF (p_is_io) CALL allocate_block_data (ghist, flux_xy_2d) @@ -175,30 +177,30 @@ SUBROUTINE flux_map_and_write_urb_2d ( & acc_vec, file_hist, varname, itime_in_file, sumarea, filter, & longname, units) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Namelist - USE MOD_DataType - USE MOD_Block - USE MOD_Grid - USE MOD_Vars_1DAccFluxes, only: nac - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - real(r8), intent(inout) :: acc_vec(:) - character(len=*), intent(in) :: file_hist - character(len=*), intent(in) :: varname - integer, intent(in) :: itime_in_file - character(len=*), intent(in) :: longname - character(len=*), intent(in) :: units - - type(block_data_real8_2d), intent(in) :: sumarea - logical, intent(in) :: filter(:) - - ! Local variables - type(block_data_real8_2d) :: flux_xy_2d - integer :: iblkme, xblk, yblk, xloc, yloc - integer :: compress + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist + USE MOD_DataType + USE MOD_Block + USE MOD_Grid + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + real(r8), intent(inout) :: acc_vec(:) + character(len=*), intent(in) :: file_hist + character(len=*), intent(in) :: varname + integer, intent(in) :: itime_in_file + character(len=*), intent(in) :: longname + character(len=*), intent(in) :: units + + type(block_data_real8_2d), intent(in) :: sumarea + logical, intent(in) :: filter(:) + + ! 
Local variables + type(block_data_real8_2d) :: flux_xy_2d + integer :: iblkme, xblk, yblk, xloc, yloc + integer :: compress IF (p_is_worker) WHERE (acc_vec /= spval) acc_vec = acc_vec / nac IF (p_is_io) CALL allocate_block_data (ghist, flux_xy_2d) @@ -240,32 +242,32 @@ SUBROUTINE flux_map_and_write_3d ( & acc_vec, file_hist, varname, itime_in_file, dim1name, lb1, ndim1, sumarea, filter, & longname, units) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Namelist - USE MOD_DataType - USE MOD_Block - USE MOD_Grid - USE MOD_Vars_1DAccFluxes, only: nac - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - real(r8), intent(inout) :: acc_vec(:,:) - character(len=*), intent(in) :: file_hist - character(len=*), intent(in) :: varname - integer, intent(in) :: itime_in_file - character(len=*), intent(in) :: dim1name - integer, intent(in) :: lb1, ndim1 - - type(block_data_real8_2d), intent(in) :: sumarea - logical, intent(in) :: filter(:) - character (len=*), intent(in) :: longname - character (len=*), intent(in) :: units - - ! Local variables - type(block_data_real8_3d) :: flux_xy_3d - integer :: iblkme, xblk, yblk, xloc, yloc, i1 - integer :: compress + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist + USE MOD_DataType + USE MOD_Block + USE MOD_Grid + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + real(r8), intent(inout) :: acc_vec(:,:) + character(len=*), intent(in) :: file_hist + character(len=*), intent(in) :: varname + integer, intent(in) :: itime_in_file + character(len=*), intent(in) :: dim1name + integer, intent(in) :: lb1, ndim1 + + type(block_data_real8_2d), intent(in) :: sumarea + logical, intent(in) :: filter(:) + character (len=*), intent(in) :: longname + character (len=*), intent(in) :: units + + ! Local variables + type(block_data_real8_3d) :: flux_xy_3d + integer :: iblkme, xblk, yblk, xloc, yloc, i1 + integer :: compress IF (p_is_worker) THEN WHERE (acc_vec /= spval) acc_vec = acc_vec / nac @@ -314,32 +316,32 @@ SUBROUTINE flux_map_and_write_4d ( & dim1name, lb1, ndim1, dim2name, lb2, ndim2, & sumarea, filter, longname, units) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Namelist - USE MOD_DataType - USE MOD_Block - USE MOD_Grid - USE MOD_Vars_1DAccFluxes, only: nac - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - real(r8), intent(inout) :: acc_vec(:,:,:) - character(len=*), intent(in) :: file_hist - character(len=*), intent(in) :: varname - integer, intent(in) :: itime_in_file - character(len=*), intent(in) :: dim1name, dim2name - integer, intent(in) :: lb1, ndim1, lb2, ndim2 - - type(block_data_real8_2d), intent(in) :: sumarea - logical, intent(in) :: filter(:) - character (len=*), intent(in) :: longname - character (len=*), intent(in) :: units - - ! Local variables - type(block_data_real8_4d) :: flux_xy_4d - integer :: iblkme, xblk, yblk, xloc, yloc, i1, i2 - integer :: compress + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist + USE MOD_DataType + USE MOD_Block + USE MOD_Grid + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + real(r8), intent(inout) :: acc_vec(:,:,:) + character(len=*), intent(in) :: file_hist + character(len=*), intent(in) :: varname + integer, intent(in) :: itime_in_file + character(len=*), intent(in) :: dim1name, dim2name + integer, intent(in) :: lb1, ndim1, lb2, ndim2 + + type(block_data_real8_2d), intent(in) :: sumarea + logical, intent(in) :: filter(:) + character (len=*), intent(in) :: longname + character (len=*), intent(in) :: units + + ! 
Local variables + type(block_data_real8_4d) :: flux_xy_4d + integer :: iblkme, xblk, yblk, xloc, yloc, i1, i2 + integer :: compress IF (p_is_worker) THEN WHERE(acc_vec /= spval) acc_vec = acc_vec / nac @@ -389,30 +391,30 @@ SUBROUTINE flux_map_and_write_ln ( & acc_vec, file_hist, varname, itime_in_file, sumarea, filter, & longname, units) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Namelist - USE MOD_DataType - USE MOD_Block - USE MOD_Grid - USE MOD_Vars_1DAccFluxes, only: nac_ln - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - real(r8), intent(inout) :: acc_vec(:) - character(len=*), intent(in) :: file_hist - character(len=*), intent(in) :: varname - integer, intent(in) :: itime_in_file - - type(block_data_real8_2d), intent(in) :: sumarea - logical, intent(in) :: filter(:) - character (len=*), intent(in), optional :: longname - character (len=*), intent(in), optional :: units - - ! Local variables - type(block_data_real8_2d) :: flux_xy_2d - integer :: i, iblkme, xblk, yblk, xloc, yloc - integer :: compress + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist + USE MOD_DataType + USE MOD_Block + USE MOD_Grid + USE MOD_Vars_1DAccFluxes, only: nac_ln + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + real(r8), intent(inout) :: acc_vec(:) + character(len=*), intent(in) :: file_hist + character(len=*), intent(in) :: varname + integer, intent(in) :: itime_in_file + + type(block_data_real8_2d), intent(in) :: sumarea + logical, intent(in) :: filter(:) + character (len=*), intent(in), optional :: longname + character (len=*), intent(in), optional :: units + + ! Local variables + type(block_data_real8_2d) :: flux_xy_2d + integer :: i, iblkme, xblk, yblk, xloc, yloc + integer :: compress IF (p_is_worker) THEN DO i = lbound(acc_vec,1), ubound(acc_vec,1) @@ -461,28 +463,29 @@ END SUBROUTINE flux_map_and_write_ln SUBROUTINE hist_gridded_write_time ( & filename, dataname, time, itime) - USE MOD_Namelist - USE MOD_Grid - USE MOD_Block - USE MOD_SPMD_Task - IMPLICIT NONE + USE MOD_Namelist + USE MOD_Grid + USE MOD_Block + USE MOD_SPMD_Task + IMPLICIT NONE - character (len=*), intent(in) :: filename - character (len=*), intent(in) :: dataname + character (len=*), intent(in) :: filename + character (len=*), intent(in) :: dataname - integer, intent(in) :: time(3) - integer, intent(out) :: itime + integer, intent(in) :: time(3) + integer, intent(out) :: itime - ! Local variables - character(len=256) :: fileblock - integer :: iblkme, iblk, jblk - logical :: fexists + ! Local variables + character(len=256) :: fileblock + integer :: iblkme, iblk, jblk + logical :: fexists IF (trim(DEF_HIST_mode) == 'one') THEN IF (p_is_master) THEN #ifdef USEMPI IF (DEF_HIST_WriteBack) THEN CALL hist_writeback_latlon_time (filename, dataname, time, hist_concat) + itime = 1 ELSE #endif inquire (file=filename, exist=fexists) @@ -520,9 +523,7 @@ SUBROUTINE hist_gridded_write_time ( & ENDIF #ifdef USEMPI - IF (.not. 
DEF_HIST_WriteBack) THEN - CALL mpi_bcast (itime, 1, MPI_INTEGER, p_root, p_comm_glb, p_err) - ENDIF + CALL mpi_bcast (itime, 1, MPI_INTEGER, p_root, p_comm_glb, p_err) #endif ELSEIF (trim(DEF_HIST_mode) == 'block') THEN @@ -561,31 +562,31 @@ END SUBROUTINE hist_gridded_write_time SUBROUTINE hist_write_var_real8_2d ( & filename, dataname, grid, itime, wdata, compress, longname, units) - USE MOD_Namelist - USE MOD_Block - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only: spval - IMPLICIT NONE + USE MOD_Namelist + USE MOD_Block + USE MOD_Grid + USE MOD_DataType + USE MOD_SPMD_Task + USE MOD_Vars_Global, only: spval + IMPLICIT NONE - character (len=*), intent(in) :: filename - character (len=*), intent(in) :: dataname - type (grid_type), intent(in) :: grid - integer, intent(in) :: itime + character (len=*), intent(in) :: filename + character (len=*), intent(in) :: dataname + type (grid_type), intent(in) :: grid + integer, intent(in) :: itime - type (block_data_real8_2d), intent(in) :: wdata + type (block_data_real8_2d), intent(in) :: wdata - integer, intent(in) :: compress - character(len=*), intent(in) :: longname - character(len=*), intent(in) :: units + integer, intent(in) :: compress + character(len=*), intent(in) :: longname + character(len=*), intent(in) :: units - ! Local variables - integer :: iblkme, iblk, jblk, idata, ixseg, iyseg - integer :: xcnt, ycnt, xbdsp, ybdsp, xgdsp, ygdsp - integer :: rmesg(3), smesg(3), isrc - character(len=256) :: fileblock - real(r8), allocatable :: rbuf(:,:), sbuf(:,:), vdata(:,:) + ! Local variables + integer :: iblkme, iblk, jblk, idata, ixseg, iyseg + integer :: xcnt, ycnt, xbdsp, ybdsp, xgdsp, ygdsp + integer :: rmesg(3), smesg(3), isrc + character(len=256) :: fileblock + real(r8), allocatable :: rbuf(:,:), sbuf(:,:), vdata(:,:) IF (trim(DEF_HIST_mode) == 'one') THEN @@ -650,8 +651,15 @@ SUBROUTINE hist_write_var_real8_2d ( & #ifdef USEMPI IF (.not. DEF_HIST_WriteBack) THEN #endif - CALL ncio_write_serial_time (filename, dataname, itime, vdata, & - 'lon', 'lat', 'time', compress) + IF (.not. & + ((trim(dataname) == 'landarea') .or. (trim(dataname) == 'landfraction'))) THEN + + CALL ncio_write_serial_time (filename, dataname, itime, vdata, & + 'lon', 'lat', 'time', compress) + + ELSEIF (itime == 1) THEN + CALL ncio_write_serial (filename, dataname, vdata, 'lon', 'lat', compress) + ENDIF IF (itime == 1) THEN CALL ncio_put_attr (filename, dataname, 'long_name', longname) @@ -716,8 +724,16 @@ SUBROUTINE hist_write_var_real8_2d ( & CALL get_filename_block (filename, iblk, jblk, fileblock) - CALL ncio_write_serial_time (fileblock, dataname, itime, & - wdata%blk(iblk,jblk)%val, 'lon', 'lat', 'time', compress) + IF (.not. & + ((trim(dataname) == 'landarea') .or. 
(trim(dataname) == 'landfraction'))) THEN + + CALL ncio_write_serial_time (fileblock, dataname, itime, & + wdata%blk(iblk,jblk)%val, 'lon', 'lat', 'time', compress) + + ELSEIF (itime == 1) THEN + CALL ncio_write_serial (fileblock, dataname, & + wdata%blk(iblk,jblk)%val, 'lon', 'lat', compress) + ENDIF ENDDO @@ -730,32 +746,32 @@ END SUBROUTINE hist_write_var_real8_2d SUBROUTINE hist_write_var_real8_3d ( & filename, dataname, dim1name, grid, itime, wdata, compress, longname, units) - USE MOD_Namelist - USE MOD_Block - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - character (len=*), intent(in) :: filename - character (len=*), intent(in) :: dataname - character (len=*), intent(in) :: dim1name - type (grid_type), intent(in) :: grid - integer, intent(in) :: itime - - type (block_data_real8_3d), intent(in) :: wdata - - integer, intent(in) :: compress - character(len=*), intent(in) :: longname - character(len=*), intent(in) :: units - - ! Local variables - integer :: iblkme, iblk, jblk, idata, ixseg, iyseg - integer :: xcnt, ycnt, ndim1, xbdsp, ybdsp, xgdsp, ygdsp - integer :: rmesg(4), smesg(4), isrc - character(len=256) :: fileblock - real(r8), allocatable :: rbuf(:,:,:), sbuf(:,:,:), vdata(:,:,:) + USE MOD_Namelist + USE MOD_Block + USE MOD_Grid + USE MOD_DataType + USE MOD_SPMD_Task + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + character (len=*), intent(in) :: filename + character (len=*), intent(in) :: dataname + character (len=*), intent(in) :: dim1name + type (grid_type), intent(in) :: grid + integer, intent(in) :: itime + + type (block_data_real8_3d), intent(in) :: wdata + + integer, intent(in) :: compress + character(len=*), intent(in) :: longname + character(len=*), intent(in) :: units + + ! Local variables + integer :: iblkme, iblk, jblk, idata, ixseg, iyseg + integer :: xcnt, ycnt, ndim1, xbdsp, ybdsp, xgdsp, ygdsp + integer :: rmesg(4), smesg(4), isrc + character(len=256) :: fileblock + real(r8), allocatable :: rbuf(:,:,:), sbuf(:,:,:), vdata(:,:,:) IF (trim(DEF_HIST_mode) == 'one') THEN @@ -910,32 +926,32 @@ END SUBROUTINE hist_write_var_real8_3d SUBROUTINE hist_write_var_real8_4d ( & filename, dataname, dim1name, dim2name, grid, itime, wdata, compress, longname, units) - USE MOD_Namelist - USE MOD_Block - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only: spval - IMPLICIT NONE - - character (len=*), intent(in) :: filename - character (len=*), intent(in) :: dataname - character (len=*), intent(in) :: dim1name, dim2name - type (grid_type), intent(in) :: grid - integer, intent(in) :: itime - - type (block_data_real8_4d), intent(in) :: wdata - - integer, intent(in) :: compress - character(len=*), intent(in) :: longname - character(len=*), intent(in) :: units - - ! 
Local variables - integer :: iblkme, iblk, jblk, idata, ixseg, iyseg - integer :: xcnt, ycnt, ndim1, ndim2, xbdsp, ybdsp, xgdsp, ygdsp - integer :: rmesg(5), smesg(5), isrc - character(len=256) :: fileblock - real(r8), allocatable :: rbuf(:,:,:,:), sbuf(:,:,:,:), vdata(:,:,:,:) + USE MOD_Namelist + USE MOD_Block + USE MOD_Grid + USE MOD_DataType + USE MOD_SPMD_Task + USE MOD_Vars_Global, only: spval + IMPLICIT NONE + + character (len=*), intent(in) :: filename + character (len=*), intent(in) :: dataname + character (len=*), intent(in) :: dim1name, dim2name + type (grid_type), intent(in) :: grid + integer, intent(in) :: itime + + type (block_data_real8_4d), intent(in) :: wdata + + integer, intent(in) :: compress + character(len=*), intent(in) :: longname + character(len=*), intent(in) :: units + + ! Local variables + integer :: iblkme, iblk, jblk, idata, ixseg, iyseg + integer :: xcnt, ycnt, ndim1, ndim2, xbdsp, ybdsp, xgdsp, ygdsp + integer :: rmesg(5), smesg(5), isrc + character(len=256) :: fileblock + real(r8), allocatable :: rbuf(:,:,:,:), sbuf(:,:,:,:), vdata(:,:,:,:) IF (trim(DEF_HIST_mode) == 'one') THEN @@ -1093,17 +1109,17 @@ END SUBROUTINE hist_write_var_real8_4d !------------------ SUBROUTINE hist_write_grid_info (fileblock, grid, iblk, jblk) - USE MOD_Block - USE MOD_Grid - IMPLICIT NONE + USE MOD_Block + USE MOD_Grid + IMPLICIT NONE - character(len=*), intent(in) :: fileblock - type (grid_type), intent(in) :: grid - integer, intent(in) :: iblk, jblk + character(len=*), intent(in) :: fileblock + type (grid_type), intent(in) :: grid + integer, intent(in) :: iblk, jblk - ! Local variable - integer :: yl, yu, xl, xu, nx - real(r8), allocatable :: lat_s(:), lat_n(:), lon_w(:), lon_e(:) + ! Local variable + integer :: yl, yu, xl, xu, nx + real(r8), allocatable :: lat_s(:), lat_n(:), lon_w(:), lon_e(:) allocate (lon_w (grid%xcnt(iblk))) allocate (lon_e (grid%xcnt(iblk))) diff --git a/main/MOD_HistVector.F90 b/main/MOD_HistVector.F90 old mode 100755 new mode 100644 index ddea1761..02ebb8ef --- a/main/MOD_HistVector.F90 +++ b/main/MOD_HistVector.F90 @@ -1,7 +1,7 @@ #include #if (defined UNSTRUCTURED || defined CATCHMENT) -module MOD_HistVector +MODULE MOD_HistVector !---------------------------------------------------------------------------- ! DESCRIPTION: @@ -13,7 +13,7 @@ module MOD_HistVector ! TODO...(need complement) !---------------------------------------------------------------------------- - use MOD_Precision + USE MOD_Precision USE MOD_SPMD_Task USE MOD_Namelist USE MOD_Vars_Global, only : spval @@ -30,7 +30,7 @@ module MOD_HistVector USE MOD_ElmVector #endif -contains +CONTAINS ! ----- subroutines ------ ! -- write history time -- @@ -46,52 +46,52 @@ SUBROUTINE hist_vector_write_time (filename, dataname, time, itime_in_file) ! Local Variables logical :: fexists - if (p_is_master) then + IF (p_is_master) THEN inquire (file=filename, exist=fexists) - if (.not. fexists) then - call ncio_create_file (trim(filename)) + IF (.not. 
fexists) THEN + CALL ncio_create_file (trim(filename)) CALL ncio_define_dimension(filename, 'time', 0) #ifdef CATCHMENT - call ncio_define_dimension(filename, 'hydrounit', totalnumhru) + CALL ncio_define_dimension(filename, 'hydrounit', totalnumhru) - call ncio_write_serial (filename, 'bsn_hru', eindx_hru, 'hydrounit') + CALL ncio_write_serial (filename, 'bsn_hru', eindx_hru, 'hydrounit') CALL ncio_put_attr (filename, 'bsn_hru', 'long_name', & 'basin index of hydrological units in mesh') - call ncio_write_serial (filename, 'typ_hru' , htype_hru, 'hydrounit') + CALL ncio_write_serial (filename, 'typ_hru' , htype_hru, 'hydrounit') CALL ncio_put_attr (filename, 'typ_hru' , 'long_name', & 'index of hydrological units inside basin') #else - call ncio_define_dimension(filename, 'element', totalnumelm) - call ncio_write_serial (filename, 'elmindex', eindex_glb, 'element') + CALL ncio_define_dimension(filename, 'element', totalnumelm) + CALL ncio_write_serial (filename, 'elmindex', eindex_glb, 'element') CALL ncio_put_attr (filename, 'elmindex', 'long_name', & 'element index in mesh') #endif CALL ncio_write_colm_dimension (filename) - endif + ENDIF - call ncio_write_time (filename, dataname, time, itime_in_file, DEF_HIST_FREQ) + CALL ncio_write_time (filename, dataname, time, itime_in_file, DEF_HIST_FREQ) ENDIF END SUBROUTINE hist_vector_write_time ! ------- - subroutine aggregate_to_vector_and_write_2d ( & + SUBROUTINE aggregate_to_vector_and_write_2d ( & acc_vec_patch, file_hist, varname, itime_in_file, filter, & longname, units) - use MOD_Precision - use MOD_SPMD_Task - use MOD_Namelist + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist USE MOD_LandPatch - use MOD_Vars_1DAccFluxes, only: nac - use MOD_Vars_Global, only: spval - implicit none + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE real(r8), intent(in) :: acc_vec_patch (:) character(len=*), intent(in) :: file_hist @@ -103,17 +103,17 @@ subroutine aggregate_to_vector_and_write_2d ( & character(len=*), intent(in) :: units ! 
Local variables - INTEGER :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress - LOGICAL, allocatable :: mask(:) - REAL(r8), allocatable :: frac(:) - REAL(r8), allocatable :: acc_vec(:), rcache(:) - REAL(r8) :: sumwt + integer :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress + logical, allocatable :: mask(:) + real(r8), allocatable :: frac(:) + real(r8), allocatable :: acc_vec(:), rcache(:) + real(r8) :: sumwt #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) #endif - if (p_is_worker) then + IF (p_is_worker) THEN #ifdef CATCHMENT numset = numhru #else @@ -152,13 +152,13 @@ subroutine aggregate_to_vector_and_write_2d ( & deallocate(frac) ENDIF ENDDO - end if + ENDIF #ifdef USEMPI mesg = (/p_iam_glb, numset/) - call mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) + CALL mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) IF (numset > 0) THEN - call mpi_send (acc_vec, numset, MPI_REAL8, & + CALL mpi_send (acc_vec, numset, MPI_REAL8, & p_root, mpi_tag_data, p_comm_glb, p_err) ENDIF #endif @@ -178,14 +178,14 @@ subroutine aggregate_to_vector_and_write_2d ( & #ifdef USEMPI DO iwork = 0, p_np_worker-1 - call mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & + CALL mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & mpi_tag_mesg, p_comm_glb, p_stat, p_err) isrc = mesg(1) ndata = mesg(2) IF (ndata > 0) THEN allocate(rcache (ndata)) - call mpi_recv (rcache, ndata, MPI_REAL8, isrc, & + CALL mpi_recv (rcache, ndata, MPI_REAL8, isrc, & mpi_tag_data, p_comm_glb, p_stat, p_err) #ifdef CATCHMENT @@ -210,14 +210,14 @@ subroutine aggregate_to_vector_and_write_2d ( & compress = DEF_HIST_CompressLevel #ifdef CATCHMENT - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & 'hydrounit', 'time', compress) #else - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & 'element', 'time', compress) #endif - IF (itime_in_file == 1) then + IF (itime_in_file == 1) THEN CALL ncio_put_attr (file_hist, varname, 'long_name', longname) CALL ncio_put_attr (file_hist, varname, 'units', units) CALL ncio_put_attr (file_hist, varname, 'missing_value', spval) @@ -231,20 +231,20 @@ subroutine aggregate_to_vector_and_write_2d ( & CALL mpi_barrier (p_comm_glb, p_err) #endif - end subroutine aggregate_to_vector_and_write_2d + END SUBROUTINE aggregate_to_vector_and_write_2d ! ------- - subroutine aggregate_to_vector_and_write_3d ( & + SUBROUTINE aggregate_to_vector_and_write_3d ( & acc_vec_patch, file_hist, varname, itime_in_file, dim1name, lb1, ndim1, filter, & longname, units) - use MOD_Precision - use MOD_SPMD_Task - use MOD_Namelist + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist USE MOD_LandPatch - use MOD_Vars_1DAccFluxes, only: nac - use MOD_Vars_Global, only: spval - implicit none + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE real(r8), intent(in) :: acc_vec_patch (lb1:,:) character(len=*), intent(in) :: file_hist @@ -259,12 +259,12 @@ subroutine aggregate_to_vector_and_write_3d ( & character(len=*), intent(in) :: units ! 
Local variables - INTEGER :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress - INTEGER :: ub1, i1 - LOGICAL, allocatable :: mask(:) - REAL(r8), allocatable :: frac(:) - REAL(r8), allocatable :: acc_vec(:,:), rcache(:,:) - REAL(r8) :: sumwt + integer :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress + integer :: ub1, i1 + logical, allocatable :: mask(:) + real(r8), allocatable :: frac(:) + real(r8), allocatable :: acc_vec(:,:), rcache(:,:) + real(r8) :: sumwt #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) @@ -272,7 +272,7 @@ subroutine aggregate_to_vector_and_write_3d ( & ub1 = lb1 + ndim1 - 1 - if (p_is_worker) then + IF (p_is_worker) THEN #ifdef CATCHMENT numset = numhru #else @@ -314,13 +314,13 @@ subroutine aggregate_to_vector_and_write_3d ( & deallocate(frac) ENDIF ENDDO - end if + ENDIF #ifdef USEMPI mesg = (/p_iam_glb, numset/) - call mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) + CALL mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) IF (numset > 0) THEN - call mpi_send (acc_vec, ndim1 * numset, MPI_REAL8, & + CALL mpi_send (acc_vec, ndim1 * numset, MPI_REAL8, & p_root, mpi_tag_data, p_comm_glb, p_err) ENDIF #endif @@ -340,14 +340,14 @@ subroutine aggregate_to_vector_and_write_3d ( & #ifdef USEMPI DO iwork = 0, p_np_worker-1 - call mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & + CALL mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & mpi_tag_mesg, p_comm_glb, p_stat, p_err) isrc = mesg(1) ndata = mesg(2) IF (ndata > 0) THEN allocate(rcache (ndim1,ndata)) - call mpi_recv (rcache, ndim1*ndata, MPI_REAL8, isrc, & + CALL mpi_recv (rcache, ndim1*ndata, MPI_REAL8, isrc, & mpi_tag_data, p_comm_glb, p_stat, p_err) DO i1 = 1, ndim1 @@ -375,18 +375,18 @@ subroutine aggregate_to_vector_and_write_3d ( & IF (p_is_master) THEN - call ncio_define_dimension (file_hist, dim1name, ndim1) + CALL ncio_define_dimension (file_hist, dim1name, ndim1) compress = DEF_HIST_CompressLevel #ifdef CATCHMENT - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & dim1name, 'hydrounit', 'time', compress) #else - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & dim1name, 'element', 'time', compress) #endif - IF (itime_in_file == 1) then + IF (itime_in_file == 1) THEN CALL ncio_put_attr (file_hist, varname, 'long_name', longname) CALL ncio_put_attr (file_hist, varname, 'units', units) CALL ncio_put_attr (file_hist, varname, 'missing_value', spval) @@ -400,21 +400,21 @@ subroutine aggregate_to_vector_and_write_3d ( & CALL mpi_barrier (p_comm_glb, p_err) #endif - end subroutine aggregate_to_vector_and_write_3d + END SUBROUTINE aggregate_to_vector_and_write_3d ! 
------- - subroutine aggregate_to_vector_and_write_4d ( & + SUBROUTINE aggregate_to_vector_and_write_4d ( & acc_vec_patch, file_hist, varname, itime_in_file, & dim1name, lb1, ndim1, dim2name, lb2, ndim2, filter, & longname, units) - use MOD_Precision - use MOD_SPMD_Task - use MOD_Namelist + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist USE MOD_LandPatch - use MOD_Vars_1DAccFluxes, only: nac - use MOD_Vars_Global, only: spval - implicit none + USE MOD_Vars_1DAccFluxes, only: nac + USE MOD_Vars_Global, only: spval + IMPLICIT NONE real(r8), intent(in) :: acc_vec_patch (lb1:,lb2:,:) character(len=*), intent(in) :: file_hist @@ -429,12 +429,12 @@ subroutine aggregate_to_vector_and_write_4d ( & character(len=*), intent(in) :: units ! Local variables - INTEGER :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress - INTEGER :: ub1, i1, ub2, i2 - LOGICAL, allocatable :: mask(:) - REAL(r8), allocatable :: frac(:) - REAL(r8), allocatable :: acc_vec(:,:,:), rcache(:,:,:) - REAL(r8) :: sumwt + integer :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress + integer :: ub1, i1, ub2, i2 + logical, allocatable :: mask(:) + real(r8), allocatable :: frac(:) + real(r8), allocatable :: acc_vec(:,:,:), rcache(:,:,:) + real(r8) :: sumwt #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) @@ -443,7 +443,7 @@ subroutine aggregate_to_vector_and_write_4d ( & ub1 = lb1 + ndim1 - 1 ub2 = lb2 + ndim2 - 1 - if (p_is_worker) then + IF (p_is_worker) THEN #ifdef CATCHMENT numset = numhru #else @@ -487,13 +487,13 @@ subroutine aggregate_to_vector_and_write_4d ( & deallocate(frac) ENDIF ENDDO - end if + ENDIF #ifdef USEMPI mesg = (/p_iam_glb, numset/) - call mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) + CALL mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) IF (numset > 0) THEN - call mpi_send (acc_vec, ndim1 * ndim2 * numset, MPI_REAL8, & + CALL mpi_send (acc_vec, ndim1 * ndim2 * numset, MPI_REAL8, & p_root, mpi_tag_data, p_comm_glb, p_err) ENDIF #endif @@ -513,14 +513,14 @@ subroutine aggregate_to_vector_and_write_4d ( & #ifdef USEMPI DO iwork = 0, p_np_worker-1 - call mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & + CALL mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & mpi_tag_mesg, p_comm_glb, p_stat, p_err) isrc = mesg(1) ndata = mesg(2) IF (ndata > 0) THEN allocate(rcache (ndim1,ndim2,ndata)) - call mpi_recv (rcache, ndim1 * ndim2 * ndata, MPI_REAL8, isrc, & + CALL mpi_recv (rcache, ndim1 * ndim2 * ndata, MPI_REAL8, isrc, & mpi_tag_data, p_comm_glb, p_stat, p_err) DO i1 = 1, ndim1 @@ -551,19 +551,19 @@ subroutine aggregate_to_vector_and_write_4d ( & IF (p_is_master) THEN - call ncio_define_dimension (file_hist, dim1name, ndim1) - call ncio_define_dimension (file_hist, dim2name, ndim2) + CALL ncio_define_dimension (file_hist, dim1name, ndim1) + CALL ncio_define_dimension (file_hist, dim2name, ndim2) compress = DEF_HIST_CompressLevel #ifdef CATCHMENT - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & dim1name, dim2name, 'hydrounit', 'time', compress) #else - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & dim1name, dim2name, 'element', 'time', compress) #endif - IF (itime_in_file == 1) then + IF (itime_in_file == 1) THEN CALL ncio_put_attr (file_hist, varname, 'long_name', longname) CALL ncio_put_attr 
(file_hist, varname, 'units', units) CALL ncio_put_attr (file_hist, varname, 'missing_value', spval) @@ -577,20 +577,20 @@ subroutine aggregate_to_vector_and_write_4d ( & CALL mpi_barrier (p_comm_glb, p_err) #endif - end subroutine aggregate_to_vector_and_write_4d + END SUBROUTINE aggregate_to_vector_and_write_4d ! ------- - subroutine aggregate_to_vector_and_write_ln ( & + SUBROUTINE aggregate_to_vector_and_write_ln ( & acc_vec_patch, file_hist, varname, itime_in_file, filter, & longname, units) - use MOD_Precision - use MOD_SPMD_Task - use MOD_Namelist + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Namelist USE MOD_LandPatch - use MOD_Vars_1DAccFluxes, only: nac_ln - use MOD_Vars_Global, only: spval - implicit none + USE MOD_Vars_1DAccFluxes, only: nac_ln + USE MOD_Vars_Global, only: spval + IMPLICIT NONE real(r8), intent(in) :: acc_vec_patch (:) character(len=*), intent(in) :: file_hist @@ -602,17 +602,17 @@ subroutine aggregate_to_vector_and_write_ln ( & character(len=*), intent(in) :: units ! Local variables - INTEGER :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress - LOGICAL, allocatable :: mask(:) - REAL(r8), allocatable :: frac(:) - REAL(r8), allocatable :: acc_vec(:), rcache(:) - REAL(r8) :: sumwt + integer :: numset, totalnumset, iset, istt, iend, iwork, mesg(2), isrc, ndata, compress + logical, allocatable :: mask(:) + real(r8), allocatable :: frac(:) + real(r8), allocatable :: acc_vec(:), rcache(:) + real(r8) :: sumwt #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) #endif - if (p_is_worker) then + IF (p_is_worker) THEN #ifdef CATCHMENT numset = numhru #else @@ -652,13 +652,13 @@ subroutine aggregate_to_vector_and_write_ln ( & deallocate(frac) ENDIF ENDDO - end if + ENDIF #ifdef USEMPI mesg = (/p_iam_glb, numset/) - call mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) + CALL mpi_send (mesg, 2, MPI_INTEGER, p_root, mpi_tag_mesg, p_comm_glb, p_err) IF (numset > 0) THEN - call mpi_send (acc_vec, numset, MPI_REAL8, & + CALL mpi_send (acc_vec, numset, MPI_REAL8, & p_root, mpi_tag_data, p_comm_glb, p_err) ENDIF #endif @@ -678,14 +678,14 @@ subroutine aggregate_to_vector_and_write_ln ( & #ifdef USEMPI DO iwork = 0, p_np_worker-1 - call mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & + CALL mpi_recv (mesg, 2, MPI_INTEGER, MPI_ANY_SOURCE, & mpi_tag_mesg, p_comm_glb, p_stat, p_err) isrc = mesg(1) ndata = mesg(2) IF (ndata > 0) THEN allocate(rcache (ndata)) - call mpi_recv (rcache, ndata, MPI_REAL8, isrc, & + CALL mpi_recv (rcache, ndata, MPI_REAL8, isrc, & mpi_tag_data, p_comm_glb, p_stat, p_err) #ifdef CATCHMENT @@ -710,14 +710,14 @@ subroutine aggregate_to_vector_and_write_ln ( & compress = DEF_HIST_CompressLevel #ifdef CATCHMENT - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & 'hydrounit', 'time', compress) #else - call ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & + CALL ncio_write_serial_time (file_hist, varname, itime_in_file, acc_vec, & 'element', 'time', compress) #endif - IF (itime_in_file == 1) then + IF (itime_in_file == 1) THEN CALL ncio_put_attr (file_hist, varname, 'long_name', longname) CALL ncio_put_attr (file_hist, varname, 'units', units) CALL ncio_put_attr (file_hist, varname, 'missing_value', spval) @@ -731,7 +731,7 @@ subroutine aggregate_to_vector_and_write_ln ( & CALL mpi_barrier (p_comm_glb, p_err) #endif - end subroutine aggregate_to_vector_and_write_ln + END SUBROUTINE 
aggregate_to_vector_and_write_ln -end module MOD_HistVector +END MODULE MOD_HistVector #endif diff --git a/main/MOD_HistWriteBack.F90 b/main/MOD_HistWriteBack.F90 old mode 100755 new mode 100644 index ce5e7a39..8d62a54e --- a/main/MOD_HistWriteBack.F90 +++ b/main/MOD_HistWriteBack.F90 @@ -371,8 +371,17 @@ SUBROUTINE hist_writeback_daemon () deallocate(wdata1d) CASE (2) - CALL ncio_write_serial_time (filename, dataname, itime_in_file, wdata2d, & - dim1name, dim2name, dim3name, compress) + IF (.not. & + ((trim(dataname) == 'landarea') .or. (trim(dataname) == 'landfraction'))) THEN + + CALL ncio_write_serial_time (filename, dataname, itime_in_file, wdata2d, & + dim1name, dim2name, dim3name, compress) + + ELSEIF (itime_in_file == 1) THEN + + CALL ncio_write_serial (filename, dataname, wdata2d, dim1name, dim2name, compress) + + ENDIF deallocate(wdata2d) CASE (3) diff --git a/main/MOD_OrbCosazi.F90 b/main/MOD_OrbCosazi.F90 deleted file mode 100644 index 3838e52a..00000000 --- a/main/MOD_OrbCosazi.F90 +++ /dev/null @@ -1,68 +0,0 @@ -#include - -MODULE MOD_OrbCosazi - -!----------------------------------------------------------------------- - USE MOD_Precision - IMPLICIT NONE - SAVE - -! PUBLIC MEMBER FUNCTIONS: - PUBLIC :: orb_cosazi -!----------------------------------------------------------------------- - -CONTAINS - -!----------------------------------------------------------------------- - - FUNCTION orb_cosazi(calday, dlon, dlat, coszen) - -!----------------------------------------------------------------------- - USE MOD_Precision - IMPLICIT NONE - - REAL(r8), intent(in) :: calday !Julian cal day (1.xx to 365.xx) - REAL(r8), intent(in) :: dlat !Centered latitude (radians) - REAL(r8), intent(in) :: dlon !Centered longitude (radians) - REAL(r8), intent(in) :: coszen !cosine of sun zenith angle - REAL(r8) :: orb_cosazi !cosine of sun azimuth angle - - ! --- Local variables --- - REAL(r8) declin !Solar declination (radians) - REAL(r8) eccf !Earth-sun distance factor (ie. (1/r)**2) - REAL(r8) lambm !Lambda m, mean long of perihelion (rad) - REAL(r8) lmm !Intermediate argument involving lambm - REAL(r8) lamb !Lambda, the earths long of perihelion - REAL(r8) invrho !Inverse normalized sun/earth distance - REAL(r8) sinl !Sine of lmm - REAL(r8) pi !3.14159265358979323846... - REAL(r8), parameter :: & - dayspy=365.0, &!days per year - ve=80.5, &!Calday of vernal equinox assumes Jan 1 = calday 1 - eccen=1.672393084E-2, &!Eccentricity - obliqr=0.409214646, &!Earths obliquity in radians - lambm0=-3.2625366E-2, &!Mean long of perihelion at the vernal equinox (radians) - mvelpp=4.92251015 !moving vernal equinox longitude of - !perihelion plus pi (radians) - !------------------------------------------------------------------------------- - - pi = 4.*atan(1.) - lambm = lambm0 + (calday - ve)*2.*pi/dayspy - lmm = lambm - mvelpp - - sinl = sin(lmm) - lamb = lambm + eccen*(2.*sinl + eccen*(1.25*sin(2.*lmm) & - + eccen*((13.0/12.0)*sin(3.*lmm) - 0.25*sinl))) - invrho = (1. + eccen*cos(lamb - mvelpp)) / (1. 
- eccen*eccen) - - declin = asin(sin(obliqr)*sin(lamb)) - eccf = invrho*invrho - - orb_cosazi = (-1*cos(declin)*cos(calday*2.0*pi+dlon)- & - coszen*cos(dlat))/(sin(dlat)*sqrt(1-coszen*coszen)) - IF (orb_cosazi<-1) orb_cosazi = -1 - IF (orb_cosazi>1) orb_cosazi = 1 - - END FUNCTION orb_cosazi - -END MODULE MOD_OrbCosazi diff --git a/main/MOD_OrbCoszen.F90 b/main/MOD_OrbCoszen.F90 index 4ead0cc0..7dc1093e 100644 --- a/main/MOD_OrbCoszen.F90 +++ b/main/MOD_OrbCoszen.F90 @@ -72,9 +72,6 @@ FUNCTION orb_coszen(calday,dlon,dlat) orb_coszen = sin(dlat)*sin(declin) & - cos(dlat)*cos(declin)*cos(calday*2.0*pi+dlon) - IF (orb_coszen<0) orb_coszen = 0 - IF (orb_coszen>1) orb_coszen = 1 - END FUNCTION orb_coszen END MODULE MOD_OrbCoszen diff --git a/main/MOD_Vars_1DForcing.F90 b/main/MOD_Vars_1DForcing.F90 index 277095e6..9613a039 100644 --- a/main/MOD_Vars_1DForcing.F90 +++ b/main/MOD_Vars_1DForcing.F90 @@ -30,7 +30,6 @@ MODULE MOD_Vars_1DForcing real(r8), allocatable :: forc_solsd (:) ! atm vis diffuse solar rad onto srf [W/m2] real(r8), allocatable :: forc_solld (:) ! atm nir diffuse solar rad onto srf [W/m2] real(r8), allocatable :: forc_frl (:) ! atmospheric infrared (longwave) radiation [W/m2] - real(r8), allocatable :: forc_swrad (:) ! atmospheric shortwave radiation [W/m2] real(r8), allocatable :: forc_hgt_u (:) ! observational height of wind [m] real(r8), allocatable :: forc_hgt_t (:) ! observational height of temperature [m] real(r8), allocatable :: forc_hgt_q (:) ! observational height of humidity [m] @@ -83,7 +82,6 @@ SUBROUTINE allocate_1D_Forcing allocate (forc_solsd (numpatch) ) ! atm vis diffuse solar rad onto srf [W/m2] allocate (forc_solld (numpatch) ) ! atm nir diffuse solar rad onto srf [W/m2] allocate (forc_frl (numpatch) ) ! atmospheric infrared (longwave) radiation [W/m2] - allocate (forc_swrad (numpatch) ) ! atmospheric shortwave radiation [W/m2] allocate (forc_hgt_u (numpatch) ) ! observational height of wind [m] allocate (forc_hgt_t (numpatch) ) ! observational height of temperature [m] allocate (forc_hgt_q (numpatch) ) ! observational height of humidity [m] @@ -135,7 +133,6 @@ SUBROUTINE deallocate_1D_Forcing () deallocate ( forc_solsd ) ! atm vis diffuse solar rad onto srf [W/m2] deallocate ( forc_solld ) ! atm nir diffuse solar rad onto srf [W/m2] deallocate ( forc_frl ) ! atmospheric infrared (longwave) radiation [W/m2] - deallocate ( forc_swrad ) ! atmospheric shortwave radiation [W/m2] deallocate ( forc_hgt_u ) ! observational height of wind [m] deallocate ( forc_hgt_t ) ! observational height of temperature [m] deallocate ( forc_hgt_q ) ! observational height of humidity [m] diff --git a/main/MOD_Vars_Global.F90 b/main/MOD_Vars_Global.F90 index d3a7bd8a..24a528be 100644 --- a/main/MOD_Vars_Global.F90 +++ b/main/MOD_Vars_Global.F90 @@ -54,11 +54,6 @@ MODULE MOD_Vars_Global integer, parameter :: nl_roof = 10 integer, parameter :: nl_wall = 10 integer, parameter :: nvegwcs = 4 ! number of vegetation water potential nodes - - ! used for downscaling - integer, parameter :: num_type = 4 - integer, parameter :: num_zenith = 51 - integer, parameter :: num_azimuth = 36 ! 
bgc variables integer, parameter :: ndecomp_pools = 7 diff --git a/main/MOD_Vars_TimeInvariants.F90 b/main/MOD_Vars_TimeInvariants.F90 index 0b5f56be..f6f907c7 100644 --- a/main/MOD_Vars_TimeInvariants.F90 +++ b/main/MOD_Vars_TimeInvariants.F90 @@ -15,7 +15,6 @@ MODULE MOD_Vars_PFTimeInvariants USE MOD_Precision USE MOD_Vars_Global - IMPLICIT NONE SAVE @@ -24,6 +23,9 @@ MODULE MOD_Vars_PFTimeInvariants real(r8), allocatable :: pftfrac (:) !PFT fractional cover real(r8), allocatable :: htop_p (:) !canopy top height [m] real(r8), allocatable :: hbot_p (:) !canopy bottom height [m] +#ifdef CROP + real(r8), allocatable :: cropfrac (:) !Crop fractional cover +#endif ! PUBLIC MEMBER FUNCTIONS: PUBLIC :: allocate_PFTimeInvariants @@ -48,6 +50,7 @@ SUBROUTINE allocate_PFTimeInvariants ! -------------------------------------------------------------------- USE MOD_SPMD_Task + USE MOD_LandPatch, only : numpatch USE MOD_LandPFT, only : numpft USE MOD_Precision IMPLICIT NONE @@ -58,6 +61,9 @@ SUBROUTINE allocate_PFTimeInvariants allocate (pftfrac (numpft)) allocate (htop_p (numpft)) allocate (hbot_p (numpft)) +#ifdef CROP + allocate (cropfrac (numpatch)) +#endif ENDIF ENDIF @@ -66,6 +72,7 @@ END SUBROUTINE allocate_PFTimeInvariants SUBROUTINE READ_PFTimeInvariants (file_restart) USE MOD_NetCDFVector + USE MOD_LandPatch USE MOD_LandPFT IMPLICIT NONE @@ -75,6 +82,9 @@ SUBROUTINE READ_PFTimeInvariants (file_restart) CALL ncio_read_vector (file_restart, 'pftfrac ', landpft, pftfrac ) ! CALL ncio_read_vector (file_restart, 'htop_p ', landpft, htop_p ) ! CALL ncio_read_vector (file_restart, 'hbot_p ', landpft, hbot_p ) ! +#ifdef CROP + CALL ncio_read_vector (file_restart, 'cropfrac ', landpatch, cropfrac) ! +#endif END SUBROUTINE READ_PFTimeInvariants @@ -82,6 +92,7 @@ SUBROUTINE WRITE_PFTimeInvariants (file_restart) USE MOD_NetCDFVector USE MOD_LandPFT + USE MOD_LandPatch USE MOD_Namelist USE MOD_Vars_Global IMPLICIT NONE @@ -100,6 +111,11 @@ SUBROUTINE WRITE_PFTimeInvariants (file_restart) CALL ncio_write_vector (file_restart, 'htop_p ', 'pft', landpft, htop_p , compress) ! CALL ncio_write_vector (file_restart, 'hbot_p ', 'pft', landpft, hbot_p , compress) ! +#ifdef CROP + CALL ncio_define_dimension_vector (file_restart, landpatch, 'patch') + CALL ncio_write_vector (file_restart, 'cropfrac', 'patch', landpatch, cropfrac, compress) ! +#endif + END SUBROUTINE WRITE_PFTimeInvariants SUBROUTINE deallocate_PFTimeInvariants @@ -115,6 +131,9 @@ SUBROUTINE deallocate_PFTimeInvariants deallocate (pftfrac ) deallocate (htop_p ) deallocate (hbot_p ) +#ifdef CROP + deallocate (cropfrac) +#endif ENDIF ENDIF @@ -124,18 +143,13 @@ END SUBROUTINE deallocate_PFTimeInvariants SUBROUTINE check_PFTimeInvariants () USE MOD_RangeCheck -#ifdef CROP - USE MOD_LandPatch -#endif IMPLICIT NONE CALL check_vector_data ('pftfrac', pftfrac) ! CALL check_vector_data ('htop_p ', htop_p ) ! CALL check_vector_data ('hbot_p ', hbot_p ) ! #ifdef CROP - IF (landpatch%has_shared) THEN - CALL check_vector_data ('pct crop', landpatch%pctshared) ! - ENDIF + CALL check_vector_data ('cropfrac', cropfrac) ! #endif END SUBROUTINE check_PFTimeInvariants @@ -241,14 +255,6 @@ MODULE MOD_Vars_TimeInvariants real(r8) :: tcrit !critical temp. to determine rain or snow real(r8) :: wetwatmax !maximum wetland water (mm) -! Used for downscaling - real(r8), allocatable :: svf_patches (:) ! sky view factor - real(r8), allocatable :: cur_patches (:) ! curvature - real(r8), allocatable :: sf_lut_patches (:,:,:) ! 
look up table of shadow factor of a patch - real(r8), allocatable :: asp_type_patches (:,:) ! topographic aspect of each character of one patch - real(r8), allocatable :: slp_type_patches (:,:) ! topographic slope of each character of one patch - real(r8), allocatable :: area_type_patches (:,:) ! area percentage of each character of one patch - ! PUBLIC MEMBER FUNCTIONS: PUBLIC :: allocate_TimeInvariants PUBLIC :: deallocate_TimeInvariants @@ -336,13 +342,6 @@ SUBROUTINE allocate_TimeInvariants () allocate (ibedrock (numpatch)) allocate (topoelv (numpatch)) allocate (topostd (numpatch)) -! Used for downscaling - allocate (svf_patches (numpatch)) - allocate (asp_type_patches (num_type,numpatch)) - allocate (slp_type_patches (num_type,numpatch)) - allocate (area_type_patches (num_type,numpatch)) - allocate (sf_lut_patches (num_azimuth,num_zenith,numpatch)) - allocate (cur_patches (numpatch)) ENDIF #if (defined LULC_IGBP_PFT || defined LULC_IGBP_PC) @@ -383,11 +382,13 @@ SUBROUTINE READ_TimeInvariants (lc_year, casename, dir_restart) integer , intent(in) :: lc_year character(len=*), intent(in) :: casename character(len=*), intent(in) :: dir_restart + ! Local variables - character(len=256) :: file_restart, cyear, lndname + character(len=256) :: file_restart, cyear write(cyear,'(i4.4)') lc_year file_restart = trim(dir_restart) // '/const/' // trim(casename) //'_restart_const' // '_lc' // trim(cyear) // '.nc' + CALL ncio_read_vector (file_restart, 'patchclass', landpatch, patchclass) ! CALL ncio_read_vector (file_restart, 'patchtype' , landpatch, patchtype ) ! CALL ncio_read_vector (file_restart, 'patchmask' , landpatch, patchmask ) ! @@ -467,26 +468,6 @@ SUBROUTINE READ_TimeInvariants (lc_year, casename, dir_restart) CALL ncio_read_bcast_serial (file_restart, 'tcrit ', tcrit ) ! critical temp. to determine rain or snow CALL ncio_read_bcast_serial (file_restart, 'wetwatmax', wetwatmax) ! maximum wetland water (mm) - !------------------------------------------------------------------------------------------------- - ! Used for downscaling - !------------------------------------------------------------------------------------------------- - IF (DEF_USE_Forcing_Downscaling) THEN - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/slp_type_patches.nc' ! slope - CALL ncio_read_vector (lndname, 'slp_type_patches', num_type, landpatch, slp_type_patches) - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/svf_patches.nc' ! sky view factor - CALL ncio_read_vector (lndname, 'svf_patches', landpatch, svf_patches) - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/asp_type_patches.nc' ! aspect - CALL ncio_read_vector (lndname, 'asp_type_patches', num_type, landpatch, asp_type_patches) - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/area_type_patches.nc' ! area percent - CALL ncio_read_vector (lndname, 'area_type_patches', num_type, landpatch, area_type_patches) - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/sf_lut_patches.nc' ! shadow mask - CALL ncio_read_vector (lndname, 'sf_lut_patches', num_azimuth, num_zenith, landpatch, sf_lut_patches) - lndname = trim(DEF_dir_landdata) // '/topography/'//trim(cyear)//'/cur_patches.nc' ! 
curvature - CALL ncio_read_vector (lndname, 'cur_patches', landpatch, cur_patches) - ENDIF - - - #if (defined LULC_IGBP_PFT || defined LULC_IGBP_PC) file_restart = trim(dir_restart) // '/const/' // trim(casename) //'_restart_pft_const' // '_lc' // trim(cyear) // '.nc' CALL READ_PFTimeInvariants (file_restart) @@ -683,7 +664,6 @@ SUBROUTINE deallocate_TimeInvariants () USE MOD_SPMD_Task USE MOD_LandPatch, only: numpatch - USE MOD_Namelist IMPLICIT NONE @@ -756,14 +736,6 @@ SUBROUTINE deallocate_TimeInvariants () deallocate (topoelv ) deallocate (topostd ) - IF (DEF_USE_Forcing_Downscaling) THEN - deallocate(slp_type_patches ) - deallocate(svf_patches ) - deallocate(asp_type_patches ) - deallocate(area_type_patches ) - deallocate(sf_lut_patches ) - ENDIF - ENDIF ENDIF @@ -786,7 +758,7 @@ SUBROUTINE check_TimeInvariants () USE MOD_SPMD_Task USE MOD_RangeCheck - USE MOD_Namelist !, only : DEF_USE_BEDROCK + USE MOD_Namelist, only : DEF_USE_BEDROCK IMPLICIT NONE @@ -845,15 +817,6 @@ SUBROUTINE check_TimeInvariants () CALL check_vector_data ('topostd [m] ', topostd ) ! CALL check_vector_data ('BVIC [-] ', BVIC ) ! - !??? -IF (DEF_USE_Forcing_Downscaling) THEN - CALL check_vector_data ('slp_type_patches [rad] ' , slp_type_patches) ! slope - CALL check_vector_data ('svf_patches [-] ' , svf_patches) ! sky view factor - CALL check_vector_data ('asp_type_patches [rad] ' , asp_type_patches) ! aspect - CALL check_vector_data ('area_type_patches [-] ' , area_type_patches) ! area percent - CALL check_vector_data ('sf_lut_patches [-] ' , sf_lut_patches) ! shadow mask -ENDIF - #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) #endif diff --git a/mkinidata/MOD_PercentagesPFTReadin.F90 b/mkinidata/MOD_PercentagesPFTReadin.F90 old mode 100755 new mode 100644 index 9cf2390e..34745310 --- a/mkinidata/MOD_PercentagesPFTReadin.F90 +++ b/mkinidata/MOD_PercentagesPFTReadin.F90 @@ -27,7 +27,7 @@ SUBROUTINE pct_readin (dir_landdata, lc_year) #endif #if (defined LULC_IGBP_PFT || defined LULC_IGBP_PC) USE MOD_LandPFT - USE MOD_Vars_PFTimeInvariants, only : pftfrac + USE MOD_Vars_PFTimeInvariants #endif #ifdef SinglePoint USE MOD_SingleSrfdata @@ -50,6 +50,19 @@ SUBROUTINE pct_readin (dir_landdata, lc_year) pftfrac = pack(SITE_pctpfts, SITE_pctpfts > 0.) #endif +#if (defined CROP) +#ifndef SinglePoint + lndname = trim(dir_landdata)//'/pctpft/'//trim(cyear)//'/pct_crops.nc' + CALL ncio_read_vector (lndname, 'pct_crops', landpatch, cropfrac) +#else + IF (SITE_landtype == CROPLAND) THEN + cropfrac = pack(SITE_pctcrop, SITE_pctcrop > 0.) + ELSE + cropfrac = 0. 
+ ENDIF +#endif +#endif + #ifdef RangeCheck IF (p_is_worker) THEN npatch = count(patchtypes(landpatch%settyp) == 0) @@ -67,12 +80,10 @@ SUBROUTINE pct_readin (dir_landdata, lc_year) CALL check_vector_data ('Sum PFT pct', sumpct) #if (defined CROP) - IF (landpatch%has_shared) THEN - CALL check_vector_data ('CROP pct', landpatch%pctshared) - ENDIF + CALL check_vector_data ('CROP pct', cropfrac) #endif - #endif + #endif IF (allocated(sumpct)) deallocate(sumpct) diff --git a/mksrfdata/Aggregation_PercentagesPFT.F90 b/mksrfdata/Aggregation_PercentagesPFT.F90 old mode 100755 new mode 100644 index f97ede0b..781df19e --- a/mksrfdata/Aggregation_PercentagesPFT.F90 +++ b/mksrfdata/Aggregation_PercentagesPFT.F90 @@ -191,23 +191,22 @@ SUBROUTINE Aggregation_PercentagesPFT (gland, dir_rawdata, dir_model_landdata, l #if (defined CROP) #ifndef SinglePoint + lndname = trim(landdir)//'/pct_crops.nc' + CALL ncio_create_file_vector (lndname, landpatch) + CALL ncio_define_dimension_vector (lndname, landpatch, 'patch') + CALL ncio_write_vector (lndname, 'pct_crops', 'patch', landpatch, pctshrpch, DEF_Srfdata_CompressLevel) + #ifdef SrfdataDiag - IF (landpatch%has_shared) THEN - typcrop = (/(ityp, ityp = 1, N_CFT)/) - lndname = trim(dir_model_landdata) // '/diag/pct_crop_patch_' // trim(cyear) // '.nc' - CALL srfdata_map_and_write (landpatch%pctshared, cropclass, typcrop, m_patch2diag, & - -1.0e36_r8, lndname, 'pct_crop_patch', compress = 1, write_mode = 'one') - ENDIF + typcrop = (/(ityp, ityp = 1, N_CFT)/) + lndname = trim(dir_model_landdata) // '/diag/pct_crop_patch_' // trim(cyear) // '.nc' + CALL srfdata_map_and_write (pctshrpch, cropclass, typcrop, m_patch2diag, & + -1.0e36_r8, lndname, 'pct_crop_patch', compress = 1, write_mode = 'one') #endif #else allocate (SITE_croptyp(numpatch)) allocate (SITE_pctcrop(numpatch)) SITE_croptyp = cropclass - IF (landpatch%has_shared) THEN - SITE_pctcrop = landpatch%pctshared - ELSE - SITE_pctcrop = 1. - ENDIF + SITE_pctcrop = pctshrpch #endif #endif diff --git a/mksrfdata/Aggregation_TopographyFactors.F90 b/mksrfdata/Aggregation_TopographyFactors.F90 deleted file mode 100644 index 7bbc086f..00000000 --- a/mksrfdata/Aggregation_TopographyFactors.F90 +++ /dev/null @@ -1,417 +0,0 @@ -#include - -SUBROUTINE Aggregation_TopographyFactors ( & - grid_topo_factor , dir_rawdata, dir_model_landdata, lc_year) - ! ---------------------------------------------------------------------- - ! Global topography-based factors data - ! - ! Created by Sisi Chen, Lu Li, 06/2024 - ! ---------------------------------------------------------------------- - USE MOD_Precision - USE MOD_Namelist - USE MOD_SPMD_Task - USE MOD_Grid - USE MOD_LandPatch - USE MOD_NetCDFVector - USE MOD_NetCDFBlock -#ifdef RangeCheck - USE MOD_RangeCheck -#endif - USE MOD_AggregationRequestData - USE MOD_Utils -#ifdef SrfdataDiag - USE MOD_Mesh, only : numelm - USE MOD_LandElm - USE MOD_SrfdataDiag -#endif - - IMPLICIT NONE - - ! arguments: - ! --------------------------------------------------------------- - INTEGER, intent(in) :: lc_year - TYPE(grid_type), intent(in) :: grid_topo_factor ! Grid structure for high resolution topography factors - CHARACTER(len=*), intent(in) :: dir_rawdata ! Direct of Rawdata - CHARACTER(len=*), intent(in) :: dir_model_landdata - - ! local variables: - ! --------------------------------------------------------------- - CHARACTER(len=256) :: landdir, lndname, cyear - CHARACTER(len=3) :: sdir - - TYPE (block_data_real8_2d) :: slp_grid ! slope - TYPE (block_data_real8_2d) :: asp_grid ! 
aspect - TYPE (block_data_real8_2d) :: svf_grid ! sky view factor - TYPE (block_data_real8_2d) :: cur_grid ! curvature - TYPE (block_data_real8_3d) :: tea_f_grid ! sine of terrain elevation angle at front of grid - TYPE (block_data_real8_3d) :: tea_b_grid ! sine of terrain elevation angle at back of grid - - ! patch - REAL(r8), allocatable :: svf_patches (:) - REAL(r8), allocatable :: cur_patches (:) - REAL(r8), allocatable :: tea_f_azi_patches (:,:) ! shape as (azimuth, patches) - REAL(r8), allocatable :: tea_b_azi_patches (:,:) - REAL(r8), allocatable :: sf_lut_patches (:,:,:) ! shape as (azimuth, zenith, patches) - - ! four defined types at all patches - REAL(r8), allocatable :: asp_type_patches (:,:) ! shape as (type, patches) - REAL(r8), allocatable :: slp_type_patches (:,:) - REAL(r8), allocatable :: area_type_patches (:,:) - - ! pixelsets - REAL(r8), allocatable :: slp_one (:) - REAL(r8), allocatable :: asp_one (:) - REAL(r8), allocatable :: svf_one (:) - REAL(r8), allocatable :: cur_one (:) - REAL(r8), allocatable :: area_one (:) - REAL(r8), allocatable :: sf_one (:) - REAL(r8), allocatable :: tea_f_azi_one (:,:) - REAL(r8), allocatable :: tea_b_azi_one (:,:) - REAL(r8), allocatable :: tea_f_one (:) - REAL(r8), allocatable :: tea_b_one (:) - LOGICAL , allocatable :: sf_mask_one (:) - LOGICAL , allocatable :: asp_mask_one (:) - LOGICAL , allocatable :: area_mask_one (:) - LOGICAL , allocatable :: slp_mask_one (:) - - ! pixelsets of four defined types at each patch - REAL(r8), allocatable :: asp_type_one (:,:) - REAL(r8), allocatable :: slp_type_one (:,:) - REAL(r8), allocatable :: area_type_one (:,:) - - REAL(r8) :: sum_area_one ! sum of pixel area of a patch - REAL(r8) :: zenith_angle(num_zenith) ! sine of sun zenith angle (divided by num_zenith part) - - ! local variables - INTEGER :: ipatch, i, ps, pe, type, a, z, count_pixels, num_pixels - -#ifdef SrfdataDiag - INTEGER :: typpatch(N_land_classification+1), ityp ! number of land classification -#endif - write(cyear,'(i4.4)') lc_year - landdir = trim(dir_model_landdata) // '/topography/' // trim(cyear) - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - IF (p_is_master) THEN - write(*,'(/, A)') 'Aggregate topography factor ...' - CALL system('mkdir -p ' // trim(adjustl(landdir))) - ENDIF -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - -#ifdef SinglePoint - IF (USE_SITE_topography) THEN - RETURN - ENDIF -#endif - - ! ------------------------------------------------------------------- - ! read topography-based factor data - ! 
------------------------------------------------------------------- - IF (p_is_io) THEN - lndname = trim(dir_rawdata)//"slope.nc" - CALL allocate_block_data (grid_topo_factor, slp_grid) - CALL ncio_read_block (lndname, 'slope', grid_topo_factor, slp_grid) - - lndname= trim(dir_rawdata)//"aspect.nc" - CALL allocate_block_data (grid_topo_factor, asp_grid) - CALL ncio_read_block (lndname, 'aspect', grid_topo_factor, asp_grid) - - lndname= trim(dir_rawdata)//"terrain_elev_angle_front.nc" - CALL allocate_block_data (grid_topo_factor, tea_f_grid, num_azimuth) - CALL ncio_read_block (lndname, 'tea_front', grid_topo_factor, num_azimuth, tea_f_grid) - - lndname= trim(dir_rawdata)//"terrain_elev_angle_back.nc" - CALL allocate_block_data (grid_topo_factor, tea_b_grid, num_azimuth) - CALL ncio_read_block (lndname, 'tea_back', grid_topo_factor, num_azimuth, tea_b_grid) - - lndname= trim(dir_rawdata)//"sky_view_factor.nc" - CALL allocate_block_data (grid_topo_factor, svf_grid) - CALL ncio_read_block (lndname, 'svf', grid_topo_factor, svf_grid) - - lndname= trim(dir_rawdata)//"curvature.nc" - CALL allocate_block_data (grid_topo_factor, cur_grid) - CALL ncio_read_block (lndname, 'curvature', grid_topo_factor, cur_grid) - - ! -------------------------------------------------------------------------- - ! aggregate the terrain factor data from the resolution of raw data to patch - ! -------------------------------------------------------------------------- -#ifdef USEMPI - ! mpi send - CALL aggregation_data_daemon ( grid_topo_factor, & - data_r8_2d_in1 = slp_grid, data_r8_2d_in2 = asp_grid, & - data_r8_2d_in3 = svf_grid, data_r8_2d_in4 = cur_grid, & - data_r8_3d_in1 = tea_f_grid, n1_r8_3d_in1 = num_azimuth, & - data_r8_3d_in2 = tea_b_grid, n1_r8_3d_in2 = num_azimuth) -#endif - ENDIF - - - IF (p_is_worker) THEN - ! allocate for output variables at patches - allocate (svf_patches (numpatch)) - allocate (cur_patches (numpatch)) - allocate (asp_type_patches (num_type, numpatch)) - allocate (slp_type_patches (num_type, numpatch)) - allocate (area_type_patches(num_type, numpatch)) - allocate (sf_lut_patches (num_azimuth, num_zenith, numpatch)) - - ! generate sine of sun zenith angles at equal intervals - DO i = 1, num_zenith - zenith_angle(i) = pi/(2*num_zenith)*(i-1) - ENDDO - - ! aggregate loop - DO ipatch = 1, numpatch - CALL aggregation_request_data (landpatch, ipatch, grid_topo_factor, & - zip = USE_zip_for_aggregation, area = area_one, & - data_r8_2d_in1 = slp_grid, data_r8_2d_out1 = slp_one, & - data_r8_2d_in2 = asp_grid, data_r8_2d_out2 = asp_one, & - data_r8_2d_in3 = svf_grid, data_r8_2d_out3 = svf_one, & - data_r8_2d_in4 = cur_grid, data_r8_2d_out4 = cur_one, & - data_r8_3d_in1 = tea_f_grid, data_r8_3d_out1 = tea_f_azi_one, n1_r8_3d_in1 = num_azimuth, & - data_r8_3d_in2 = tea_b_grid, data_r8_3d_out2 = tea_b_azi_one, n1_r8_3d_in2 = num_azimuth) - - ! ------------------------------------------------------------------ - ! aggregate sky view factor, curvature at patches - ! ------------------------------------------------------------------ - IF (any(svf_one /= -9999.0)) THEN - svf_patches (ipatch) = & - sum(svf_one * area_one, mask = svf_one /= -9999.0) & - / sum(area_one, mask = svf_one /= -9999.0) - ELSE - svf_patches (ipatch) = -1.0e36 - ENDIF - - IF (any(cur_one /= -9999.0)) THEN - cur_patches (ipatch) = & - sum(cur_one * area_one, mask = cur_one /= -9999.0) & - / sum(area_one, mask = cur_one /= -9999.0) - ELSE - cur_patches (ipatch) = -1.0e36 - ENDIF - - ! 
------------------------------------------------------------------------------ - ! aggregate look up table of shadow factor at patches - ! ------------------------------------------------------------------------------ - ! allocate pixel variables - allocate(tea_f_one (num_pixels)) - allocate(tea_b_one (num_pixels)) - allocate(sf_one (num_pixels)) - allocate(sf_mask_one (num_pixels)) - - ! number of pixels - num_pixels = size(area_one) - ! sum of areas of one patch - sum_area_one = sum(area_one, mask = area_one>0) - - DO a = 1, num_azimuth - ! terrain elevation angle at each azimuth - tea_f_one(:) = tea_f_azi_one(a,:) - tea_b_one(:) = tea_b_azi_one(a,:) - - DO z = 1, num_zenith - ! count the pixels which are not missing value - count_pixels = 0 - - DO i = 1, num_pixels - IF ((isnan(tea_f_one(i))).or.(isnan(tea_b_one(i)))) THEN - sf_one(i) = 1 ! Not consider the effect of casting shadows - ELSE - IF (tea_f_one(i)>1) tea_f_one(i) = 1 - IF (tea_f_one(i)<-1) tea_f_one(i) = -1 - IF (tea_b_one(i)>1) tea_b_one(i) = 1 - IF (tea_b_one(i)<-1) tea_b_one(i) = -1 - tea_f_one(i) = asin(tea_f_one(i)) - tea_b_one(i) = asin(tea_b_one(i)) - - ! Compare the sun's altitude angle to the terrain's altitude angle - ! to determine the value of the shadow factor. - ! ----------------------------------------------------------------- - ! Sisi Chen, Lu Li, Yongjiu Dai, et al. Exploring Topography Downscaling - ! Methods for Hyper-Resolution Land Surface Modeling. - ! DOI: 10.22541/au.171403656.68476353/v1 - ! ----------------------------------------------------------------- - IF ((tea_b_one(i) /= -9999).and.(tea_f_one(i) /= -9999)) THEN - count_pixels = count_pixels+1 - - IF (pi*0.5 - zenith_angle(z) < tea_b_one(i)) THEN - sf_one(i) = 0 - ELSE IF (pi*0.5 - zenith_angle(z) > tea_f_one(i)) THEN - sf_one(i) = 1 - ELSE - IF (tea_f_one(i).eq.tea_b_one(i)) tea_f_one(i) = tea_b_one(i)+0.001 - sf_one(i) = (0.5*pi - zenith_angle(z) - tea_b_one(i))/(tea_f_one(i) - tea_b_one(i)) - ENDIF - - ENDIF - ENDIF - ENDDO - sf_mask_one(:) = sf_one(:) /= -9999 - sf_lut_patches(a,z,ipatch) = sum(sf_one(:), mask = sf_mask_one)/count_pixels - ENDDO - ENDDO - - ! deallocate - deallocate(tea_f_one) - deallocate(tea_b_one) - deallocate(sf_one) - deallocate(sf_mask_one) - - ! ----------------------------------------------------------------------------------------------- - ! aggregate slope and aspect at four defined types at patches - ! ----------------------------------------------------------------------------------------------- - ! allocate pixelsets variables - allocate(asp_type_one(1:num_type,1:num_pixels)) - allocate(slp_type_one(1:num_type,1:num_pixels)) - allocate(area_type_one(1:num_type,1:num_pixels)) - allocate(slp_mask_one(1:num_pixels)) - allocate(asp_mask_one(1:num_pixels)) - allocate(area_mask_one(1:num_pixels)) - - DO i = 1, num_type - asp_type_one(i,:) = -9999 - slp_type_one(i,:) = -9999 - area_type_one(i,:) = -9999 - ENDDO - - DO i = 1, num_pixels - ! Define the south slope, north slope, abrupt slope and gentle lope of target pixel - IF ((asp_one(i).ge.0 .and. asp_one(i).le.90) .or. (asp_one(i).ge.270 .and. asp_one(i).le.360).and.(slp_one(i).ge.15*pi/180)) THEN ! north abrupt slope - type = 1 - ELSE IF ((asp_one(i).ge.0 .and. asp_one(i).le.90) .or. (asp_one(i).ge.270 .and. asp_one(i).le.360).and.(slp_one(i)<15*pi/180)) THEN ! north gentle slope - type = 2 - ELSE IF ((asp_one(i).gt.90) .and. (asp_one(i).lt.270) .and. (slp_one(i).ge.15*pi/180)) THEN ! south abrupt slope - type = 3 - ELSE IF ((asp_one(i).gt.90) .and. 
(asp_one(i).lt.270) .and. (slp_one(i).lt.15*pi/180)) THEN ! south gentle slope - type = 4 - ELSE ! missing value=-9999 - cycle - END IF - - IF ((area_one(i)>0).and.(area_one(i) PROGRAM MKSRFDATA - ! ====================================================================== - ! Surface grid edges: - ! The model domain was defined with the north, east, south, west edges: - ! edgen: northern edge of grid : > -90 and <= 90 (degrees) - ! edgee: eastern edge of grid : > western edge and <= 180 - ! edges: southern edge of grid : >= -90 and < 90 - ! edgew: western edge of grid : >= -180 and < 180 - ! - ! Region (global) latitude grid goes from: - ! NORTHERN edge (POLE) to SOUTHERN edge (POLE) - ! Region (global) longitude grid starts at: - ! WESTERN edge (DATELINE with western edge) - ! West of Greenwich defined negative for global grids, - ! the western edge of the longitude grid starts at the dateline - ! - ! Land characteristics at the 30 arc-seconds grid resolution (RAW DATA): - ! 1. Global Terrain Dataset (elevation height, aspect, slope, curvature, sky view factor, terrain elevation angle, ...) - ! 2. Global Land Cover Characteristics (land cover TYPE, plant leaf area index, Forest Height, ...) - ! 3. Global Lakes and Wetlands Characteristics (lake and wetlands types, lake coverage and lake depth) - ! 4. Global Glacier Characteristics - ! 5. Global Urban Characteristics (urban extent, ...) - ! 6. Global Soil Characteristics (...) - ! 7. Global Cultural Characteristics (ON-GONG PROJECT) - ! - ! Land charateristics at the model grid resolution (CREATED): - ! 1. Model grid (longitude, latitude) - ! 2. Fraction (area) of patches of grid (0-1) - ! 2.1 Fraction of land water bodies (lake, reservoir, river) - ! 2.2 Fraction of wetland - ! 2.3 Fraction of glacier - ! 2.4 Fraction of urban and built-up - ! ...... - ! 3. Plant leaf area index - ! 4. Tree height - ! 5. Lake depth - ! 6. Soil thermal and hydraulic parameters - ! - ! Created by Yongjiu Dai, 02/2014 - ! - ! REVISIONS: - ! Shupeng Zhang, 01/2022: porting codes to MPI parallel version - ! ====================================================================== + +! ====================================================================== +! Surface grid edges: +! The model domain was defined with the north, east, south, west edges: +! edgen: northern edge of grid : > -90 and <= 90 (degrees) +! edgee: eastern edge of grid : > western edge and <= 180 +! edges: southern edge of grid : >= -90 and < 90 +! edgew: western edge of grid : >= -180 and < 180 +! +! Region (global) latitude grid goes from: +! NORTHERN edge (POLE) to SOUTHERN edge (POLE) +! Region (global) longitude grid starts at: +! WESTERN edge (DATELINE with western edge) +! West of Greenwich defined negative for global grids, +! the western edge of the longitude grid starts at the dateline +! +! Land characteristics at the 30 arc-seconds grid resolution (RAW DATA): +! 1. Global Terrain Dataset (elevation height,...) +! 2. Global Land Cover Characteristics (land cover type, plant leaf area index, Forest Height, ...) +! 3. Global Lakes and Wetlands Characteristics (lake and wetlands types, lake coverage and lake depth) +! 4. Global Glacier Characteristics +! 5. Global Urban Characteristics (urban extent, ...) +! 6. Global Soil Characteristics (...) +! 7. Global Cultural Characteristics (ON-GONG PROJECT) +! +! Land charateristics at the model grid resolution (CREATED): +! 1. Model grid (longitude, latitude) +! 2. Fraction (area) of patches of grid (0-1) +! 
2.1 Fraction of land water bodies (lake, reservoir, river) +! 2.2 Fraction of wetland +! 2.3 Fraction of glacier +! 2.4 Fraction of urban and built-up +! ...... +! 3. Plant leaf area index +! 4. Tree height +! 5. Lake depth +! 6. Soil thermal and hydraulic parameters +! +! Created by Yongjiu Dai, 02/2014 +! +! REVISIONS: +! Shupeng Zhang, 01/2022: porting codes to MPI parallel version +! +! ====================================================================== + USE MOD_Precision USE MOD_SPMD_Task USE MOD_Namelist @@ -71,25 +74,26 @@ PROGRAM MKSRFDATA #ifdef SrfdataDiag USE MOD_SrfdataDiag, only : gdiag, srfdata_diag_init #endif + USE MOD_RegionClip - IMPLICIT NONE + IMPLICIT NONE - CHARACTER(len=256) :: nlfile - CHARACTER(len=256) :: dir_rawdata - CHARACTER(len=256) :: dir_landdata + character(len=256) :: nlfile - REAL(r8) :: edgen ! northern edge of grid (degrees) - REAL(r8) :: edgee ! eastern edge of grid (degrees) - REAL(r8) :: edges ! southern edge of grid (degrees) - REAL(r8) :: edgew ! western edge of grid (degrees) + character(len=256) :: dir_rawdata + character(len=256) :: dir_landdata + real(r8) :: edgen ! northern edge of grid (degrees) + real(r8) :: edgee ! eastern edge of grid (degrees) + real(r8) :: edges ! southern edge of grid (degrees) + real(r8) :: edgew ! western edge of grid (degrees) - TYPE (grid_type) :: gsoil, gridlai, gtopo, grid_topo_factor - TYPE (grid_type) :: grid_urban_5km, grid_urban_500m + type (grid_type) :: gsoil, gridlai, gtopo + type (grid_type) :: grid_urban_5km, grid_urban_500m - INTEGER :: lc_year - INTEGER*8 :: start_time, end_time, c_per_sec, time_used + integer :: lc_year + integer*8 :: start_time, end_time, c_per_sec, time_used #ifdef USEMPI @@ -113,12 +117,14 @@ PROGRAM MKSRFDATA #endif IF (USE_srfdata_from_larger_region) THEN + CALL srfdata_region_clip (DEF_dir_existing_srfdata, DEF_dir_landdata) + #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) CALL spmd_exit #endif - CALL exit() + CALL EXIT() ENDIF IF (USE_srfdata_from_3D_gridded_data) THEN @@ -130,7 +136,7 @@ PROGRAM MKSRFDATA CALL mpi_barrier (p_comm_glb, p_err) CALL spmd_exit #endif - CALL exit() + CALL EXIT() ENDIF dir_rawdata = DEF_dir_rawdata @@ -143,14 +149,13 @@ PROGRAM MKSRFDATA ! define blocks CALL gblock%set () - ! CALL gblock%set_by_file (DEF_file_block) CALL Init_GlobalVars - CALL Init_LC_Const + CAll Init_LC_Const - ! ........................................................................... - ! 1. Read in or create the modeling grids coordinates and related information - ! ........................................................................... +! ........................................................................... +! 1. Read in or create the modeling grids coordinates and related information +! ........................................................................... ! define domain in pixel coordinate CALL pixel%set_edges (edges, edgen, edgew, edgee) @@ -204,10 +209,6 @@ PROGRAM MKSRFDATA ! define grid for topography CALL gtopo%define_by_name ('colm_500m') - ! define grid for topography-based factor (used for forcing downscaling module) - ! TODO: need to change to merit 90m mesh - CALL grid_topo_factor%define_by_name ('heihe_90m') - ! add by dong, only test for making urban data #ifdef URBAN_MODEL CALL gurban%define_by_name ('colm_500m') @@ -239,7 +240,6 @@ PROGRAM MKSRFDATA #endif CALL pixel%assimilate_grid (gtopo) - CALL pixel%assimilate_grid (grid_topo_factor) ! 
map pixels to grid coordinates #ifndef SinglePoint @@ -265,7 +265,6 @@ PROGRAM MKSRFDATA #endif CALL pixel%map_to_grid (gtopo) - CALL pixel%map_to_grid (grid_topo_factor) ! build land elms CALL mesh_build () @@ -306,9 +305,9 @@ PROGRAM MKSRFDATA CALL landpft_build(lc_year) #endif - ! ................................................................ - ! 2. SAVE land surface tessellation information - ! ................................................................ +! ................................................................ +! 2. SAVE land surface tessellation information +! ................................................................ CALL gblock%save_to_file (dir_landdata) @@ -332,15 +331,11 @@ PROGRAM MKSRFDATA CALL pixelset_save_to_file (dir_landdata, 'landurban', landurban, lc_year) #endif - ! ................................................................ - ! 3. Mapping land characteristic parameters to the model grids - ! ................................................................ +! ................................................................ +! 3. Mapping land characteristic parameters to the model grids +! ................................................................ #ifdef SrfdataDiag -#if (defined CROP) - CALL elm_patch%build (landelm, landpatch, use_frac = .true., sharedfrac = pctshrpch) -#else CALL elm_patch%build (landelm, landpatch, use_frac = .true.) -#endif #ifdef GRIDBASED CALL gdiag%define_by_copy (gridmesh) #else @@ -370,18 +365,14 @@ PROGRAM MKSRFDATA CALL Aggregation_Topography (gtopo , dir_rawdata, dir_landdata, lc_year) - IF (DEF_USE_Forcing_Downscaling) THEN - CALL Aggregation_TopographyFactors (grid_topo_factor, dir_rawdata, dir_landdata, lc_year) - ENDIF - #ifdef URBAN_MODEL CALL Aggregation_urban (dir_rawdata, dir_landdata, lc_year, & grid_urban_5km, grid_urban_500m) #endif - ! ................................................................ - ! 4. Free memories. - ! ................................................................ +! ................................................................ +! 4. Free memories. +! ................................................................ #ifdef SinglePoint #if (defined LULC_IGBP_PFT || defined LULC_IGBP_PC) diff --git a/mksrfdata/MOD_AggregationRequestData.F90 b/mksrfdata/MOD_AggregationRequestData.F90 index 98e85d79..b8e01ca7 100644 --- a/mksrfdata/MOD_AggregationRequestData.F90 +++ b/mksrfdata/MOD_AggregationRequestData.F90 @@ -2,18 +2,18 @@ MODULE MOD_AggregationRequestData - !------------------------------------------------------------- - ! DESCRIPTION: - ! - ! Aggregation Utilities. - ! - ! On IO processes, a data daemon is running to provide data - ! at fine resolutions for worker processes. - ! On worker processes, request is sent to IO processes and - ! data is returned from IO processes. - ! - ! Created by Shupeng Zhang, May 2023 - !------------------------------------------------------------- +!------------------------------------------------------------- +! DESCRIPTION: +! +! Aggregation Utilities. +! +! On IO processes, a data daemon is running to provide data +! at fine resolutions for worker processes. +! On worker processes, request is sent to IO processes and +! data is returned from IO processes. +! +! 
Created by Shupeng Zhang, May 2023 +!------------------------------------------------------------- IMPLICIT NONE @@ -32,60 +32,52 @@ SUBROUTINE aggregation_data_daemon (grid_in, & data_r8_2d_in1, data_r8_2d_in2, data_r8_2d_in3, data_r8_2d_in4, & data_r8_2d_in5, data_r8_2d_in6, & data_r8_3d_in1, n1_r8_3d_in1 , data_r8_3d_in2, n1_r8_3d_in2, & - data_i4_2d_in1, data_i4_2d_in2, & - data_r8_3d_in3, n1_r8_3d_in3 , data_r8_3d_in4, n1_r8_3d_in4) + data_i4_2d_in1, data_i4_2d_in2) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Grid - USE MOD_DataType + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Grid + USE MOD_DataType - IMPLICIT NONE - - TYPE (grid_type), intent(in) :: grid_in - - ! 2D REAL data - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in1 - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in2 - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in3 - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in4 - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in5 - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in6 - - ! 3D REAL data - INTEGER, intent(in), optional :: n1_r8_3d_in1 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in1 - - INTEGER, intent(in), optional :: n1_r8_3d_in2 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in2 - - INTEGER, intent(in), optional :: n1_r8_3d_in3 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in3 - - INTEGER, intent(in), optional :: n1_r8_3d_in4 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in4 + IMPLICIT NONE - - ! 2D INTEGER data - TYPE (block_data_int32_2d), intent(in), optional :: data_i4_2d_in1 - TYPE (block_data_int32_2d), intent(in), optional :: data_i4_2d_in2 - - ! Local Variables - INTEGER :: nreq, ireq, rmesg(2), isrc, idest - INTEGER :: xblk, yblk, xloc, yloc - INTEGER, allocatable :: ylist(:), xlist(:) + type (grid_type), intent(in) :: grid_in + + ! 2D REAL data + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in1 + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in2 + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in3 + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in4 + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in5 + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in6 + + ! 3D REAL data + integer, intent(in), optional :: n1_r8_3d_in1 + type (block_data_real8_3d), intent(in), optional :: data_r8_3d_in1 + + integer, intent(in), optional :: n1_r8_3d_in2 + type (block_data_real8_3d), intent(in), optional :: data_r8_3d_in2 + + ! 2D INTEGER data + type (block_data_int32_2d), intent(in), optional :: data_i4_2d_in1 + type (block_data_int32_2d), intent(in), optional :: data_i4_2d_in2 + + ! Local Variables + integer :: nreq, ireq, rmesg(2), isrc, idest + integer :: xblk, yblk, xloc, yloc + integer, allocatable :: ylist(:), xlist(:) - REAL(r8), allocatable :: sbuf_r8_1d(:), sbuf_r8_2d(:,:) - INTEGER , allocatable :: sbuf_i4_1d(:) + real(r8), allocatable :: sbuf_r8_1d(:), sbuf_r8_2d(:,:) + integer , allocatable :: sbuf_i4_1d(:) - LOGICAL, allocatable :: worker_done (:) + logical, allocatable :: worker_done (:) IF (p_is_io) THEN allocate (worker_done (0:p_np_worker-1)) worker_done(:) = .false. - DO while (any(.not. worker_done)) + DO WHILE (any(.not. 
worker_done)) CALL mpi_recv (rmesg, 2, MPI_INTEGER, & MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) @@ -230,43 +222,6 @@ SUBROUTINE aggregation_data_daemon (grid_in, & deallocate (sbuf_r8_2d) ENDIF - IF (present(data_r8_3d_in3) .and. present(n1_r8_3d_in3)) THEN - - allocate (sbuf_r8_2d (n1_r8_3d_in3,nreq)) - DO ireq = 1, nreq - xblk = grid_in%xblk(xlist(ireq)) - yblk = grid_in%yblk(ylist(ireq)) - xloc = grid_in%xloc(xlist(ireq)) - yloc = grid_in%yloc(ylist(ireq)) - - sbuf_r8_2d(:,ireq) = data_r8_3d_in3%blk(xblk,yblk)%val(:,xloc,yloc) - ENDDO - - CALL mpi_send (sbuf_r8_2d, n1_r8_3d_in3*nreq, MPI_REAL8, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (sbuf_r8_2d) - ENDIF - - - IF (present(data_r8_3d_in4) .and. present(n1_r8_3d_in4)) THEN - - allocate (sbuf_r8_2d (n1_r8_3d_in4,nreq)) - DO ireq = 1, nreq - xblk = grid_in%xblk(xlist(ireq)) - yblk = grid_in%yblk(ylist(ireq)) - xloc = grid_in%xloc(xlist(ireq)) - yloc = grid_in%yloc(ylist(ireq)) - - sbuf_r8_2d(:,ireq) = data_r8_3d_in4%blk(xblk,yblk)%val(:,xloc,yloc) - ENDDO - - CALL mpi_send (sbuf_r8_2d, n1_r8_3d_in4*nreq, MPI_REAL8, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (sbuf_r8_2d) - ENDIF - allocate (sbuf_i4_1d (nreq)) IF (present(data_i4_2d_in1)) THEN @@ -327,83 +282,72 @@ SUBROUTINE aggregation_request_data ( & data_r8_2d_in6, data_r8_2d_out6, & data_r8_3d_in1, data_r8_3d_out1, n1_r8_3d_in1, lb1_r8_3d_in1, & data_r8_3d_in2, data_r8_3d_out2, n1_r8_3d_in2, lb1_r8_3d_in2, & - data_r8_3d_in3, data_r8_3d_out3, n1_r8_3d_in3, lb1_r8_3d_in3, & - data_r8_3d_in4, data_r8_3d_out4, n1_r8_3d_in4, lb1_r8_3d_in4, & data_i4_2d_in1, data_i4_2d_out1, & data_i4_2d_in2, data_i4_2d_out2, & filledvalue_i4) - USE MOD_Precision - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixel - USE MOD_Grid - USE MOD_DataType - USE MOD_Mesh - USE MOD_Pixelset - USE MOD_Utils - - IMPLICIT NONE + USE MOD_Precision + USE MOD_SPMD_Task + USE MOD_Block + USE MOD_Pixel + USE MOD_Grid + USE MOD_DataType + USE MOD_Mesh + USE MOD_Pixelset + USE MOD_Utils - TYPE (pixelset_type), intent(in) :: pixelset - INTEGER, intent(in) :: iset - - TYPE (grid_type), intent(in) :: grid_in - logical, intent(in) :: zip - - REAL(r8), allocatable, intent(out), optional :: area(:) - - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in1 - REAL(r8), allocatable, intent(out), optional :: data_r8_2d_out1 (:) + IMPLICIT NONE - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in2 - REAL(r8), allocatable, intent(out), optional :: data_r8_2d_out2 (:) + type (pixelset_type), intent(in) :: pixelset + integer, intent(in) :: iset - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in3 - REAL(r8), allocatable, intent(out), optional :: data_r8_2d_out3 (:) + type (grid_type), intent(in) :: grid_in + logical, intent(in) :: zip + + real(r8), allocatable, intent(out), optional :: area(:) - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in4 - REAL(r8), allocatable, intent(out), optional :: data_r8_2d_out4 (:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in1 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out1 (:) - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in5 - REAL(r8), allocatable, intent(out), optional :: data_r8_2d_out5 (:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in2 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out2 (:) - TYPE (block_data_real8_2d), intent(in), optional :: data_r8_2d_in6 - REAL(r8), allocatable, intent(out), 
optional :: data_r8_2d_out6 (:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in3 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out3 (:) - INTEGER, intent(in), optional :: n1_r8_3d_in1, lb1_r8_3d_in1 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in1 - REAL(r8), allocatable, intent(out), optional :: data_r8_3d_out1 (:,:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in4 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out4 (:) - INTEGER, intent(in), optional :: n1_r8_3d_in2, lb1_r8_3d_in2 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in2 - REAL(r8), allocatable, intent(out), optional :: data_r8_3d_out2 (:,:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in5 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out5 (:) - INTEGER, intent(in), optional :: n1_r8_3d_in3, lb1_r8_3d_in3 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in3 - REAL(r8), allocatable, intent(out), optional :: data_r8_3d_out3 (:,:) + type (block_data_real8_2d), intent(in), optional :: data_r8_2d_in6 + real(r8), allocatable, intent(out), optional :: data_r8_2d_out6 (:) - INTEGER, intent(in), optional :: n1_r8_3d_in4, lb1_r8_3d_in4 - TYPE (block_data_real8_3d), intent(in), optional :: data_r8_3d_in4 - REAL(r8), allocatable, intent(out), optional :: data_r8_3d_out4 (:,:) + integer, intent(in), optional :: n1_r8_3d_in1, lb1_r8_3d_in1 + type (block_data_real8_3d), intent(in), optional :: data_r8_3d_in1 + real(r8), allocatable, intent(out), optional :: data_r8_3d_out1 (:,:) - - TYPE (block_data_int32_2d), intent(in), optional :: data_i4_2d_in1 - INTEGER, allocatable, intent(out), optional :: data_i4_2d_out1 (:) + integer, intent(in), optional :: n1_r8_3d_in2, lb1_r8_3d_in2 + type (block_data_real8_3d), intent(in), optional :: data_r8_3d_in2 + real(r8), allocatable, intent(out), optional :: data_r8_3d_out2 (:,:) + + type (block_data_int32_2d), intent(in), optional :: data_i4_2d_in1 + integer, allocatable, intent(out), optional :: data_i4_2d_out1 (:) - TYPE (block_data_int32_2d), intent(in), optional :: data_i4_2d_in2 - INTEGER, allocatable, intent(out), optional :: data_i4_2d_out2 (:) + type (block_data_int32_2d), intent(in), optional :: data_i4_2d_in2 + integer, allocatable, intent(out), optional :: data_i4_2d_out2 (:) - INTEGER, intent(in), optional :: filledvalue_i4 + integer, intent(in), optional :: filledvalue_i4 - ! Local Variables - INTEGER :: totalreq, ireq, nreq, smesg(2), isrc, idest, iproc - INTEGER :: ilon, ilat, xblk, yblk, xloc, yloc, iloc, nx, ny, ix, iy, ig - INTEGER :: ie, ipxstt, ipxend, npxl, ipxl, lb1, xgrdthis, ygrdthis - INTEGER, allocatable :: ylist(:), xlist(:), ipt(:), ibuf(:), rbuf_i4_1d(:) - INTEGER, allocatable :: xsorted(:), ysorted(:), xy2d(:,:) - REAL(r8), allocatable :: area2d(:,:), rbuf_r8_1d(:), rbuf_r8_2d(:,:) - LOGICAL, allocatable :: msk(:) + ! 
Local Variables + integer :: totalreq, ireq, nreq, smesg(2), isrc, idest, iproc + integer :: ilon, ilat, xblk, yblk, xloc, yloc, iloc, nx, ny, ix, iy, ig + integer :: ie, ipxstt, ipxend, npxl, ipxl, lb1, xgrdthis, ygrdthis + integer, allocatable :: ylist(:), xlist(:), ipt(:), ibuf(:), rbuf_i4_1d(:) + integer, allocatable :: xsorted(:), ysorted(:), xy2d(:,:) + real(r8), allocatable :: area2d(:,:), rbuf_r8_1d(:), rbuf_r8_2d(:,:) + logical, allocatable :: msk(:) ie = pixelset%ielm (iset) @@ -513,24 +457,6 @@ SUBROUTINE aggregation_request_data ( & allocate (data_r8_3d_out2 (lb1:lb1-1+n1_r8_3d_in2,totalreq)) ENDIF - IF (present(data_r8_3d_in3) .and. present(data_r8_3d_out3) .and. present(n1_r8_3d_in3)) THEN - IF (present(lb1_r8_3d_in3)) THEN - lb1 = lb1_r8_3d_in3 - ELSE - lb1 = 1 - ENDIF - allocate (data_r8_3d_out3 (lb1:lb1-1+n1_r8_3d_in3,totalreq)) - ENDIF - - IF (present(data_r8_3d_in4) .and. present(data_r8_3d_out4) .and. present(n1_r8_3d_in4)) THEN - IF (present(lb1_r8_3d_in4)) THEN - lb1 = lb1_r8_3d_in4 - ELSE - lb1 = 1 - ENDIF - allocate (data_r8_3d_out4 (lb1:lb1-1+n1_r8_3d_in4,totalreq)) - ENDIF - IF (present(data_i4_2d_in1) .and. present(data_i4_2d_out1)) THEN allocate (data_i4_2d_out1 (totalreq)) IF (present(filledvalue_i4)) THEN @@ -634,23 +560,6 @@ SUBROUTINE aggregation_request_data ( & deallocate (rbuf_r8_2d) ENDIF - IF (present(data_r8_3d_in3) .and. present(data_r8_3d_out3) .and. present(n1_r8_3d_in3)) THEN - allocate (rbuf_r8_2d (n1_r8_3d_in3,nreq)) - CALL mpi_recv (rbuf_r8_2d, n1_r8_3d_in3*nreq, MPI_REAL8, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL unpack_inplace (rbuf_r8_2d, msk, data_r8_3d_out3) - deallocate (rbuf_r8_2d) - ENDIF - - IF (present(data_r8_3d_in4) .and. present(data_r8_3d_out4) .and. present(n1_r8_3d_in4)) THEN - allocate (rbuf_r8_2d (n1_r8_3d_in4,nreq)) - CALL mpi_recv (rbuf_r8_2d, n1_r8_3d_in4*nreq, MPI_REAL8, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL unpack_inplace (rbuf_r8_2d, msk, data_r8_3d_out4) - deallocate (rbuf_r8_2d) - ENDIF - - allocate (rbuf_i4_1d (nreq)) IF (present(data_i4_2d_in1) .and. present(data_i4_2d_out1)) THEN CALL mpi_recv (rbuf_i4_1d, nreq, MPI_INTEGER, & @@ -716,15 +625,6 @@ SUBROUTINE aggregation_request_data ( & data_r8_3d_out2(:,ireq) = data_r8_3d_in2%blk(xblk,yblk)%val(:,xloc,yloc) ENDIF - IF (present(data_r8_3d_in3) .and. present(data_r8_3d_out3) .and. present(n1_r8_3d_in3)) THEN - data_r8_3d_out3(:,ireq) = data_r8_3d_in3%blk(xblk,yblk)%val(:,xloc,yloc) - ENDIF - - IF (present(data_r8_3d_in4) .and. present(data_r8_3d_out4) .and. present(n1_r8_3d_in4)) THEN - data_r8_3d_out4(:,ireq) = data_r8_3d_in4%blk(xblk,yblk)%val(:,xloc,yloc) - ENDIF - - IF (present(data_i4_2d_in1) .and. present(data_i4_2d_out1)) THEN data_i4_2d_out1(ireq) = data_i4_2d_in1%blk(xblk,yblk)%val(xloc,yloc) ENDIF @@ -743,11 +643,11 @@ END SUBROUTINE aggregation_request_data SUBROUTINE aggregation_worker_done () - USE MOD_SPMD_Task + USE MOD_SPMD_Task - IMPLICIT NONE + IMPLICIT NONE - INTEGER :: smesg(2), iproc, idest + integer :: smesg(2), iproc, idest IF (p_is_worker) THEN DO iproc = 0, p_np_io-1 diff --git a/mksrfdata/MOD_LandCrop.F90 b/mksrfdata/MOD_LandCrop.F90 old mode 100755 new mode 100644 index c82ffbd3..5e60c050 --- a/mksrfdata/MOD_LandCrop.F90 +++ b/mksrfdata/MOD_LandCrop.F90 @@ -19,6 +19,7 @@ MODULE MOD_LandCrop ! 
---- Instance ---- type(grid_type) :: gcrop integer, allocatable :: cropclass (:) + real(r8), allocatable :: pctshrpch (:) CONTAINS @@ -53,8 +54,7 @@ SUBROUTINE landcrop_build (lc_year) type(block_data_real8_3d) :: cropdata integer :: sharedfilter(1), cropfilter(1) integer :: iblkme, ib, jb - real(r8), allocatable :: pctshared1 (:) - real(r8), allocatable :: pctshared2 (:) + real(r8), allocatable :: pctshared (:) integer , allocatable :: classshared(:) write(cyear,'(i4.4)') lc_year @@ -67,12 +67,12 @@ SUBROUTINE landcrop_build (lc_year) numpatch = count(SITE_pctcrop > 0.) - allocate (pctshared2 (numpatch)) + allocate (pctshrpch (numpatch)) allocate (cropclass(numpatch)) - cropclass = pack(SITE_croptyp, SITE_pctcrop > 0.) - pctshared2 = pack(SITE_pctcrop, SITE_pctcrop > 0.) + cropclass = pack(SITE_croptyp, SITE_pctcrop > 0.) + pctshrpch = pack(SITE_pctcrop, SITE_pctcrop > 0.) - pctshared2 = pctshared2 / sum(pctshared2) + pctshrpch = pctshrpch / sum(pctshrpch) allocate (landpatch%eindex (numpatch)) allocate (landpatch%ipxstt (numpatch)) @@ -88,7 +88,7 @@ SUBROUTINE landcrop_build (lc_year) landpatch%has_shared = .true. allocate (landpatch%pctshared(numpatch)) - landpatch%pctshared = pctshared2 + landpatch%pctshared = pctshrpch landpatch%nset = numpatch CALL landpatch%set_vecgs @@ -121,7 +121,7 @@ SUBROUTINE landcrop_build (lc_year) sharedfilter = (/ 1 /) CALL pixelsetshared_build (landpatch, gpatch, pctshared_xy, 2, sharedfilter, & - pctshared1, classshared) + pctshared, classshared) IF (p_is_worker) THEN IF (landpatch%nset > 0) THEN @@ -138,7 +138,7 @@ SUBROUTINE landcrop_build (lc_year) cropfilter = (/ CROPLAND /) CALL pixelsetshared_build (landpatch, gcrop, cropdata, N_CFT, cropfilter, & - pctshared2, cropclass, fracin = pctshared1) + pctshrpch, cropclass, fracin = pctshared) numpatch = landpatch%nset @@ -146,12 +146,11 @@ SUBROUTINE landcrop_build (lc_year) IF (p_is_worker) THEN IF (numpatch > 0) THEN allocate(landpatch%pctshared(numpatch)) - landpatch%pctshared = pctshared2 + landpatch%pctshared = pctshrpch ENDIF ENDIF - IF (allocated(pctshared1 )) deallocate(pctshared1 ) - IF (allocated(pctshared2 )) deallocate(pctshared2 ) + IF (allocated(pctshared )) deallocate(pctshared ) IF (allocated(classshared)) deallocate(classshared) #ifdef USEMPI diff --git a/mksrfdata/MOD_LandPFT.F90 b/mksrfdata/MOD_LandPFT.F90 old mode 100755 new mode 100644 index 0c93c4c4..61f83de3 --- a/mksrfdata/MOD_LandPFT.F90 +++ b/mksrfdata/MOD_LandPFT.F90 @@ -108,6 +108,9 @@ SUBROUTINE landpft_build (lc_year) allocate(pft2patch (numpft)) + landpft%has_shared = .true. + allocate (landpft%pctshared (numpft)) + #ifndef CROP IF (patchtypes(landpatch%settyp(1)) == 0) THEN #else @@ -118,6 +121,8 @@ SUBROUTINE landpft_build (lc_year) pft2patch (:) = 1 patch_pft_s(:) = 1 patch_pft_e(:) = numpft + + landpft%pctshared = pack(SITE_pctpfts, SITE_pctpfts > 0.) #ifdef CROP ELSEIF (landpatch%settyp(1) == CROPLAND) THEN DO ipft = 1, numpft @@ -126,6 +131,8 @@ SUBROUTINE landpft_build (lc_year) patch_pft_s (ipft) = ipft patch_pft_e (ipft) = ipft ENDDO + + landpft%pctshared = landpatch%pctshared #endif ENDIF ELSE @@ -144,6 +151,8 @@ SUBROUTINE landpft_build (lc_year) #ifdef USEMPI CALL mpi_barrier (p_comm_glb, p_err) #endif + + landpft%has_shared = .true. IF (p_is_io) THEN @@ -222,6 +231,9 @@ SUBROUTINE landpft_build (lc_year) allocate (landpft%ipxend (numpft)) allocate (landpft%ielm (numpft)) + allocate (landpft%pctshared (numpft)) + landpft%pctshared(:) = 1. 
+ npft = 0 npatch = 0 DO ipatch = 1, numpatch @@ -246,6 +258,8 @@ SUBROUTINE landpft_build (lc_year) landpft%ipxend(npft) = landpatch%ipxend(ipatch) landpft%settyp(npft) = ipft + landpft%pctshared(npft) = pctpft_patch(ipft,ipatch) + pft2patch(npft) = npatch ENDIF ENDDO @@ -260,6 +274,8 @@ SUBROUTINE landpft_build (lc_year) landpft%ipxstt(npft) = landpatch%ipxstt(ipatch) landpft%ipxend(npft) = landpatch%ipxend(ipatch) landpft%settyp(npft) = cropclass(ipatch) + N_PFT - 1 + + landpft%pctshared(npft) = landpatch%pctshared(ipatch) pft2patch(npft) = npatch #endif diff --git a/mksrfdata/MOD_PixelsetShared.F90 b/mksrfdata/MOD_PixelsetShared.F90 old mode 100755 new mode 100644 index 7b60ade6..dbc8c054 --- a/mksrfdata/MOD_PixelsetShared.F90 +++ b/mksrfdata/MOD_PixelsetShared.F90 @@ -150,6 +150,8 @@ SUBROUTINE pixelsetshared_build (pixelset, gshared, datashared, nmaxshared, typf allocate (fracout (nsetshared)) allocate (sharedclass(nsetshared)) + fracout(:) = 1.0 + jpset = 0 DO ipset = 1, pixelset%nset IF (any(typfilter(:) == settyp1(ipset))) THEN diff --git a/share/MOD_Grid.F90 b/share/MOD_Grid.F90 index 7719e2d2..f8d2f2e0 100644 --- a/share/MOD_Grid.F90 +++ b/share/MOD_Grid.F90 @@ -2,61 +2,62 @@ MODULE MOD_Grid - !------------------------------------------------------------------------------- - ! DESCRIPTION: - ! - ! Definition of latitude-longitude grids and data types related to grids. - ! - ! Latitude-longitude grid can be defined by - ! 1. "name" : frequently used grids is predefined in this module; - ! 2. "ndims" : how many longitude and latitude grids are used globally; - ! 3. "res" : longitude and latitude resolutions in radian - ! 4. "center" : longitude and latitude grid centers, and the border lines - ! are defined by center lines of grid centers; the region - ! boundaries is optional. - ! 5. "file" : read grid informations from a file, the variables are - ! 'lat_s', 'lat_n', 'lon_w', 'lon_e' - ! 6. "copy" : copy grid informations from an existing grid - ! - ! Grid centers in radian can be calculated by using "set_rlon" and "set_rlat" - ! - ! Two additional data types are defined: - ! 1. "grid_list_type" : list of grid boxes; - ! 2. "grid_concat_type" : used to concatenate grids distributed in blocks. - ! - ! Created by Shupeng Zhang, May 2023 - !------------------------------------------------------------------------------- +!------------------------------------------------------------------------------- +! DESCRIPTION: +! +! Definition of latitude-longitude grids and data types related to grids. +! +! Latitude-longitude grid can be defined by +! 1. "name" : frequently used grids is predefined in this MODULE; +! 2. "ndims" : how many longitude and latitude grids are used globally; +! 3. "res" : longitude and latitude resolutions in radian +! 4. "center" : longitude and latitude grid centers, and the border lines +! are defined by center lines of grid centers; the region +! boundaries is optional. +! 5. "file" : read grid informations from a file, the variables are +! 'lat_s', 'lat_n', 'lon_w', 'lon_e' +! 6. "copy" : copy grid informations from an existing grid +! +! Grid centers in radian can be calculated by using "set_rlon" and "set_rlat" +! +! Two additional data types are defined: +! 1. "grid_list_type" : list of grid boxes; +! 2. "grid_concat_type" : used to concatenate grids distributed in blocks. +! +! Created by Shupeng Zhang, May 2023 +!------------------------------------------------------------------------------- USE MOD_Precision + USE MOD_UserDefFun IMPLICIT NONE ! 
---- data types ---- - TYPE :: grid_type + type :: grid_type - INTEGER :: nlat - INTEGER :: nlon + integer :: nlat + integer :: nlon ! Latitude direction. (yinc = 1) means south to north. - INTEGER :: yinc + integer :: yinc ! Coordinates. - REAL(r8), allocatable :: lat_s (:) - REAL(r8), allocatable :: lat_n (:) - REAL(r8), allocatable :: lon_w (:) - REAL(r8), allocatable :: lon_e (:) + real(r8), allocatable :: lat_s (:) + real(r8), allocatable :: lat_n (:) + real(r8), allocatable :: lon_w (:) + real(r8), allocatable :: lon_e (:) ! Blocks. - INTEGER, allocatable :: xdsp(:), ydsp(:) - INTEGER, allocatable :: xcnt(:), ycnt(:) - INTEGER, allocatable :: xblk(:), yblk(:) - INTEGER, allocatable :: xloc(:), yloc(:) + integer, allocatable :: xdsp(:), ydsp(:) + integer, allocatable :: xcnt(:), ycnt(:) + integer, allocatable :: xblk(:), yblk(:) + integer, allocatable :: xloc(:), yloc(:) ! Mapping to pixels. - INTEGER, allocatable :: xgrd(:), ygrd(:) + integer, allocatable :: xgrd(:), ygrd(:) ! Grid info. - REAL(r8), allocatable :: rlon(:) - REAL(r8), allocatable :: rlat(:) + real(r8), allocatable :: rlon(:) + real(r8), allocatable :: rlat(:) CONTAINS procedure, PUBLIC :: define_by_name => grid_define_by_name @@ -75,21 +76,21 @@ MODULE MOD_Grid final :: grid_free_mem - END TYPE grid_type + END type grid_type ! ---- data types ---- - TYPE :: grid_list_type - INTEGER :: ng - INTEGER, allocatable :: ilat(:) - INTEGER, allocatable :: ilon(:) - END TYPE grid_list_type + type :: grid_list_type + integer :: ng + integer, allocatable :: ilat(:) + integer, allocatable :: ilon(:) + END type grid_list_type type :: segment_type integer :: blk integer :: cnt integer :: bdsp integer :: gdsp - end type segment_type + END type segment_type type :: grid_info_type integer :: nlat, nlon @@ -99,9 +100,9 @@ MODULE MOD_Grid real(r8), allocatable :: lon_e(:) real(r8), allocatable :: lon_c(:) !grid center real(r8), allocatable :: lat_c(:) !grid center - end type grid_info_type + END type grid_info_type - TYPE :: grid_concat_type + type :: grid_concat_type integer :: ndatablk integer :: nxseg, nyseg type(segment_type), allocatable :: xsegs(:), ysegs(:) @@ -109,18 +110,18 @@ MODULE MOD_Grid CONTAINS procedure, PUBLIC :: set => set_grid_concat final :: grid_concat_free_mem - END TYPE grid_concat_type + END type grid_concat_type CONTAINS ! -------------------------------- SUBROUTINE grid_init (this, nlon, nlat) - IMPLICIT NONE - class (grid_type) :: this + IMPLICIT NONE + class (grid_type) :: this - INTEGER, intent(in) :: nlon - INTEGER, intent(in) :: nlat + integer, intent(in) :: nlon + integer, intent(in) :: nlat this%nlat = nlat this%nlon = nlon @@ -139,14 +140,14 @@ END SUBROUTINE grid_init ! -------------------------------- SUBROUTINE grid_define_by_name (this, gridname) - IMPLICIT NONE - class (grid_type) :: this + IMPLICIT NONE + class (grid_type) :: this - CHARACTER(len=*), intent(in) :: gridname + character(len=*), intent(in) :: gridname - ! Local variables - INTEGER :: nlat, nlon, ilat, ilon - REAL(r8) :: del_lat, del_lon + ! Local variables + integer :: nlat, nlon, ilat, ilon + real(r8) :: del_lat, del_lon IF (trim(gridname) == 'merit_90m') THEN @@ -174,34 +175,7 @@ SUBROUTINE grid_define_by_name (this, gridname) CALL this%set_blocks () ENDIF - - ! 
added by Chen Sisi, used for downscaling module test - IF (trim(gridname) == 'heihe_90m') THEN - nlat = 2*60*20 - nlon = 2*60*20 - - this%nlat = nlat - this%nlon = nlon - - CALL this%init (this%nlon, this%nlat) - - del_lat = 2.0 / nlat - DO ilat = 1, this%nlat - this%lat_s(ilat) = 39 - del_lat * ilat - del_lat/2.0 - this%lat_n(ilat) = 39 - del_lat * (ilat-1) - del_lat/2.0 - ENDDO - del_lon = 2.0 / nlon - DO ilon = 1, this%nlon - this%lon_w(ilon) = 100 + del_lon * (ilon-1) - del_lon/2.0 - this%lon_e(ilon) = 100 + del_lon * ilon - del_lon/2.0 - ENDDO - - CALL this%normalize () - CALL this%set_blocks () - ENDIF - - IF (trim(gridname) == 'colm_5km') THEN CALL this%define_by_ndims (8640,4320) @@ -238,15 +212,15 @@ END SUBROUTINE grid_define_by_name !----------------------------------------------------- SUBROUTINE grid_define_by_ndims (this, lon_points, lat_points) - IMPLICIT NONE - class (grid_type) :: this + IMPLICIT NONE + class (grid_type) :: this - INTEGER, intent(in) :: lon_points - INTEGER, intent(in) :: lat_points + integer, intent(in) :: lon_points + integer, intent(in) :: lat_points - ! Local variables - INTEGER :: ilat, ilon - REAL(r8) :: del_lat, del_lon + ! Local variables + integer :: ilat, ilon + real(r8) :: del_lat, del_lon this%nlat = lat_points this%nlon = lon_points @@ -275,13 +249,13 @@ END SUBROUTINE grid_define_by_ndims !----------------------------------------------------- SUBROUTINE grid_define_by_res (this, lon_res, lat_res) - IMPLICIT NONE - class (grid_type) :: this + IMPLICIT NONE + class (grid_type) :: this - REAL(r8), intent(in) :: lon_res, lat_res + real(r8), intent(in) :: lon_res, lat_res - ! Local variables - INTEGER :: lon_points, lat_points + ! Local variables + integer :: lon_points, lat_points lon_points = nint(360.0/lon_res) lat_points = nint(180.0/lat_res) @@ -294,17 +268,17 @@ END SUBROUTINE grid_define_by_res SUBROUTINE grid_define_by_center (this, lat_in, lon_in, & south, north, west, east) - USE MOD_Precision - USE MOD_Utils - IMPLICIT NONE - class (grid_type) :: this + USE MOD_Precision + USE MOD_Utils + IMPLICIT NONE + class (grid_type) :: this - REAL(r8), intent(in) :: lat_in(:), lon_in(:) - REAL(r8), intent(in), optional :: south, north, west, east + real(r8), intent(in) :: lat_in(:), lon_in(:) + real(r8), intent(in), optional :: south, north, west, east - ! Local variables - INTEGER :: ilat, ilon, ilone, ilonw - REAL(r8), allocatable :: lon_in_n(:) + ! Local variables + integer :: ilat, ilon, ilone, ilonw + real(r8), allocatable :: lon_in_n(:) this%nlat = size(lat_in) this%nlon = size(lon_in) @@ -403,16 +377,16 @@ END SUBROUTINE grid_define_by_center !----------------------------------------------------- SUBROUTINE grid_define_from_file (this, filename, latname, lonname) - USE MOD_NetCDFSerial - IMPLICIT NONE - class (grid_type) :: this + USE MOD_NetCDFSerial + IMPLICIT NONE + class (grid_type) :: this - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in), optional :: latname, lonname + character(len=*), intent(in) :: filename + character(len=*), intent(in), optional :: latname, lonname - ! Local Variables - real(r8), allocatable :: lon_in(:) - real(r8), allocatable :: lat_in(:) + ! Local Variables + real(r8), allocatable :: lon_in(:) + real(r8), allocatable :: lat_in(:) IF (.not. (present(latname) .and. 
present(lonname))) THEN @@ -429,9 +403,9 @@ SUBROUTINE grid_define_from_file (this, filename, latname, lonname) ELSE - call ncio_read_bcast_serial (filename, latname, lat_in) - call ncio_read_bcast_serial (filename, lonname, lon_in) - call this%define_by_center (lat_in, lon_in) + CALL ncio_read_bcast_serial (filename, latname, lat_in) + CALL ncio_read_bcast_serial (filename, lonname, lon_in) + CALL this%define_by_center (lat_in, lon_in) deallocate (lat_in, lon_in) ENDIF @@ -441,11 +415,11 @@ END SUBROUTINE grid_define_from_file !----------------------------------------------------- SUBROUTINE grid_define_by_copy (this, grid_in) - USE MOD_NetCDFSerial - IMPLICIT NONE - class (grid_type) :: this + USE MOD_NetCDFSerial + IMPLICIT NONE + class (grid_type) :: this - TYPE(grid_type) :: grid_in + type(grid_type) :: grid_in CALL this%init (grid_in%nlon, grid_in%nlat) @@ -462,12 +436,12 @@ END SUBROUTINE grid_define_by_copy !----------------------------------------------------- SUBROUTINE grid_normalize (this) - USE MOD_Utils - IMPLICIT NONE - class(grid_type) :: this + USE MOD_Utils + IMPLICIT NONE + class(grid_type) :: this - ! Local variable - INTEGER :: ilon, ilat + ! Local variable + integer :: ilon, ilat DO ilon = 1, this%nlon CALL normalize_longitude (this%lon_w(ilon)) @@ -490,16 +464,16 @@ END SUBROUTINE grid_normalize !----------------------------------------------------- SUBROUTINE grid_set_blocks (this) - USE MOD_Namelist - USE MOD_Block - USE MOD_Utils - IMPLICIT NONE + USE MOD_Namelist + USE MOD_Block + USE MOD_Utils + IMPLICIT NONE - class (grid_type) :: this + class (grid_type) :: this - ! Local variables - INTEGER :: ilat, ilon, iblk, jblk, ilon_e - REAL(r8) :: edges, edgen, edgew, edgee + ! Local variables + integer :: ilat, ilon, iblk, jblk, ilon_e + real(r8) :: edges, edgen, edgew, edgee IF (allocated(this%xcnt)) deallocate(this%xcnt) IF (allocated(this%xdsp)) deallocate(this%xdsp) @@ -543,7 +517,7 @@ SUBROUTINE grid_set_blocks (this) this%ydsp(jblk) = ilat - 1 - DO while (ilat <= this%nlat) + DO WHILE (ilat <= this%nlat) IF (this%lat_s(ilat) < edgen) THEN IF (this%lat_s(ilat) < gblock%lat_n(jblk)) THEN @@ -558,11 +532,11 @@ SUBROUTINE grid_set_blocks (this) IF (jblk <= gblock%nyblk) THEN this%ydsp(jblk) = ilat - 1 ELSE - exit + EXIT ENDIF ENDIF ELSE - exit + EXIT ENDIF ENDDO @@ -581,7 +555,7 @@ SUBROUTINE grid_set_blocks (this) this%ydsp(jblk) = ilat - 1 - DO while (ilat <= this%nlat) + DO WHILE (ilat <= this%nlat) IF (this%lat_n(ilat) > edges) THEN IF (this%lat_n(ilat) > gblock%lat_s(jblk)) THEN @@ -596,11 +570,11 @@ SUBROUTINE grid_set_blocks (this) IF (jblk >= 1) THEN this%ydsp(jblk) = ilat - 1 ELSE - exit + EXIT ENDIF ENDIF ELSE - exit + EXIT ENDIF ENDDO @@ -627,7 +601,7 @@ SUBROUTINE grid_set_blocks (this) ilon_e = ilon - 1 IF (ilon_e == 0) ilon_e = this%nlon ilon = mod(ilon,this%nlon) + 1 - DO while (.true.) + DO WHILE (.true.) IF (lon_between_floor(this%lon_w(ilon), edgew, edgee)) THEN IF (lon_between_floor(this%lon_w(ilon), gblock%lon_w(iblk), gblock%lon_e(iblk))) THEN @@ -639,7 +613,7 @@ SUBROUTINE grid_set_blocks (this) IF (ilon /= ilon_e) THEN ilon = mod(ilon,this%nlon) + 1 ELSE - exit + EXIT ENDIF ELSE iblk = mod(iblk,gblock%nxblk) + 1 @@ -651,7 +625,7 @@ SUBROUTINE grid_set_blocks (this) this%xdsp(iblk) = ilon - 1 this%xcnt(iblk) = 0 - DO while (.true.) + DO WHILE (.true.) 
this%xcnt(iblk) = this%xcnt(iblk) + 1 this%xblk(ilon) = iblk this%xloc(ilon) = this%xcnt(iblk) @@ -659,15 +633,15 @@ SUBROUTINE grid_set_blocks (this) IF (ilon /= ilon_e) THEN ilon = mod(ilon,this%nlon) + 1 ELSE - exit + EXIT ENDIF ENDDO - exit + EXIT ENDIF ENDIF ELSE - exit + EXIT ENDIF ENDDO @@ -676,16 +650,16 @@ END SUBROUTINE grid_set_blocks !----------- SUBROUTINE grid_set_rlon (this) - USE MOD_Precision - USE MOD_Utils - USE MOD_Vars_Global, only : pi - IMPLICIT NONE + USE MOD_Precision + USE MOD_Utils + USE MOD_Vars_Global, only : pi + IMPLICIT NONE - class (grid_type) :: this + class (grid_type) :: this - ! Local variables - INTEGER :: ix - REAL(r8) :: lon + ! Local variables + integer :: ix + real(r8) :: lon IF (.not. allocated(this%rlon)) THEN allocate (this%rlon(this%nlon)) @@ -708,15 +682,15 @@ END SUBROUTINE grid_set_rlon !----------- SUBROUTINE grid_set_rlat (this) - USE MOD_Precision - USE MOD_Utils - USE MOD_Vars_Global, only : pi - IMPLICIT NONE + USE MOD_Precision + USE MOD_Utils + USE MOD_Vars_Global, only : pi + IMPLICIT NONE - class (grid_type) :: this + class (grid_type) :: this - ! Local variables - INTEGER :: iy + ! Local variables + integer :: iy IF (.not. allocated(this%rlat)) THEN allocate (this%rlat(this%nlat)) @@ -732,8 +706,8 @@ END SUBROUTINE grid_set_rlat !--------- SUBROUTINE grid_free_mem (this) - IMPLICIT NONE - TYPE (grid_type) :: this + IMPLICIT NONE + type (grid_type) :: this IF (allocated (this%lat_s)) deallocate (this%lat_s) IF (allocated (this%lat_n)) deallocate (this%lat_n) @@ -761,21 +735,21 @@ SUBROUTINE grid_free_mem (this) END SUBROUTINE grid_free_mem !---------- - subroutine set_grid_concat (this, grid) + SUBROUTINE set_grid_concat (this, grid) - use MOD_Block - USE MOD_Utils - implicit none + USE MOD_Block + USE MOD_Utils + IMPLICIT NONE - class(grid_concat_type) :: this - type(grid_type), intent(in) :: grid + class(grid_concat_type) :: this + type(grid_type), intent(in) :: grid - ! Local variables - integer :: ilat_l, ilat_u, ilat, ilatloc, jblk, iyseg - integer :: ilon_w, ilon_e, ilon, ilonloc, iblk, ixseg + ! Local variables + integer :: ilat_l, ilat_u, ilat, ilatloc, jblk, iyseg + integer :: ilon_w, ilon_e, ilon, ilonloc, iblk, ixseg - ilat_l = findloc(grid%yblk /= 0, .true., dim=1) - ilat_u = findloc(grid%yblk /= 0, .true., dim=1, back=.true.) + ilat_l = findloc_ud(grid%yblk /= 0) + ilat_u = findloc_ud(grid%yblk /= 0, back=.true.) 
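! NOTE (illustrative, an assumption rather than part of this patch): findloc_ud
! is presumably a user-defined stand-in for the Fortran 2008 intrinsic FINDLOC,
! supplied by MOD_UserDefFun (added to the USE list of this module), so the code
! also builds with compilers that lack the intrinsic. A minimal sketch matching
! the two calls above, findloc_ud(mask) and findloc_ud(mask, back=.true.),
! returning 0 when no element is .true., could look like:
!
!    integer FUNCTION findloc_ud (mask, back) result(iloc)
!       logical, intent(in)           :: mask(:)
!       logical, intent(in), optional :: back
!       integer :: i, i0, i1, istep
!       i0 = 1;  i1 = size(mask);  istep = 1
!       IF (present(back)) THEN
!          IF (back) THEN
!             i0 = size(mask);  i1 = 1;  istep = -1
!          ENDIF
!       ENDIF
!       iloc = 0
!       DO i = i0, i1, istep
!          IF (mask(i)) THEN
!             iloc = i
!             RETURN
!          ENDIF
!       ENDDO
!    END FUNCTION findloc_ud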
this%ginfo%nlat = ilat_u - ilat_l + 1 IF (allocated(this%ginfo%lat_s)) deallocate(this%ginfo%lat_s) @@ -788,66 +762,66 @@ subroutine set_grid_concat (this, grid) this%nyseg = 0 jblk = 0 ilatloc = 0 - do ilat = ilat_l, ilat_u - if (grid%yblk(ilat) /= jblk) then + DO ilat = ilat_l, ilat_u + IF (grid%yblk(ilat) /= jblk) THEN this%nyseg = this%nyseg + 1 jblk = grid%yblk(ilat) - end if + ENDIF ilatloc = ilatloc + 1 this%ginfo%lat_s(ilatloc) = grid%lat_s(ilat) this%ginfo%lat_n(ilatloc) = grid%lat_n(ilat) this%ginfo%lat_c(ilatloc) = (grid%lat_s(ilat)+grid%lat_n(ilat)) * 0.5 - end do + ENDDO IF (allocated(this%ysegs)) deallocate(this%ysegs) allocate (this%ysegs (this%nyseg)) iyseg = 0 jblk = 0 - do ilat = ilat_l, ilat_u - if (grid%yblk(ilat) /= jblk) then + DO ilat = ilat_l, ilat_u + IF (grid%yblk(ilat) /= jblk) THEN iyseg = iyseg + 1 jblk = grid%yblk(ilat) this%ysegs(iyseg)%blk = jblk this%ysegs(iyseg)%bdsp = grid%yloc(ilat) - 1 this%ysegs(iyseg)%gdsp = ilat - ilat_l this%ysegs(iyseg)%cnt = 1 - else + ELSE this%ysegs(iyseg)%cnt = this%ysegs(iyseg)%cnt + 1 - end if - end do + ENDIF + ENDDO - if (all(grid%xblk > 0)) then + IF (all(grid%xblk > 0)) THEN ilon_w = 1 ilon_e = grid%nlon - else - ilon_w = findloc(grid%xblk /= 0, .true., dim=1) - do while (.true.) + ELSE + ilon_w = findloc_ud(grid%xblk /= 0) + DO WHILE (.true.) ilon = ilon_w - 1 - if (ilon == 0) ilon = grid%nlon + IF (ilon == 0) ilon = grid%nlon - if (grid%xblk(ilon) /= 0) then + IF (grid%xblk(ilon) /= 0) THEN ilon_w = ilon - else - exit - end if - end do + ELSE + EXIT + ENDIF + ENDDO ilon_e = ilon_w - do while (.true.) + DO WHILE (.true.) ilon = mod(ilon_e,grid%nlon) + 1 - if (grid%xblk(ilon) /= 0) then + IF (grid%xblk(ilon) /= 0) THEN ilon_e = ilon - else - exit - end if - end do - end if + ELSE + EXIT + ENDIF + ENDDO + ENDIF this%ginfo%nlon = ilon_e - ilon_w + 1 - if (this%ginfo%nlon <= 0) THEN + IF (this%ginfo%nlon <= 0) THEN this%ginfo%nlon = this%ginfo%nlon + grid%nlon ENDIF @@ -862,12 +836,12 @@ subroutine set_grid_concat (this, grid) ilon = ilon_w - 1 iblk = 0 ilonloc = 0 - do while (.true.) + DO WHILE (.true.) ilon = mod(ilon,grid%nlon) + 1 - if (grid%xblk(ilon) /= iblk) then + IF (grid%xblk(ilon) /= iblk) THEN this%nxseg = this%nxseg + 1 iblk = grid%xblk(ilon) - end if + ENDIF ilonloc = ilonloc + 1 this%ginfo%lon_w(ilonloc) = grid%lon_w(ilon) @@ -879,8 +853,8 @@ subroutine set_grid_concat (this, grid) CALL normalize_longitude (this%ginfo%lon_c(ilonloc)) ENDIF - if (ilon == ilon_e) exit - end do + IF (ilon == ilon_e) EXIT + ENDDO DO ilon = 2, this%ginfo%nlon IF ((this%ginfo%lon_c(ilon) < this%ginfo%lon_c(ilon-1)) & @@ -896,43 +870,43 @@ subroutine set_grid_concat (this, grid) iblk = 0 ilon = ilon_w - 1 ilonloc = 0 - do while (.true.) + DO WHILE (.true.) 
ilon = mod(ilon,grid%nlon) + 1 ilonloc = ilonloc + 1 - if (grid%xblk(ilon) /= iblk) then + IF (grid%xblk(ilon) /= iblk) THEN ixseg = ixseg + 1 iblk = grid%xblk(ilon) this%xsegs(ixseg)%blk = iblk this%xsegs(ixseg)%bdsp = grid%xloc(ilon) - 1 this%xsegs(ixseg)%gdsp = ilonloc - 1 this%xsegs(ixseg)%cnt = 1 - else + ELSE this%xsegs(ixseg)%cnt = this%xsegs(ixseg)%cnt + 1 - end if + ENDIF - if (ilon == ilon_e) exit - end do + IF (ilon == ilon_e) EXIT + ENDDO this%ndatablk = 0 - do iyseg = 1, this%nyseg - do ixseg = 1, this%nxseg + DO iyseg = 1, this%nyseg + DO ixseg = 1, this%nxseg iblk = this%xsegs(ixseg)%blk jblk = this%ysegs(iyseg)%blk - if (gblock%pio(iblk,jblk) >= 0) then + IF (gblock%pio(iblk,jblk) >= 0) THEN this%ndatablk = this%ndatablk + 1 - end if - end do - end do + ENDIF + ENDDO + ENDDO - end subroutine set_grid_concat + END SUBROUTINE set_grid_concat !------- SUBROUTINE grid_concat_free_mem (this) - IMPLICIT NONE + IMPLICIT NONE - TYPE(grid_concat_type) :: this + type(grid_concat_type) :: this IF (allocated(this%xsegs)) deallocate(this%xsegs) IF (allocated(this%ysegs)) deallocate(this%ysegs) diff --git a/share/MOD_InterpArealWeight.F90 b/share/MOD_InterpArealWeight.F90 deleted file mode 100644 index f34e1417..00000000 --- a/share/MOD_InterpArealWeight.F90 +++ /dev/null @@ -1,1839 +0,0 @@ -#include - -MODULE MOD_InterpArealWeight - -!----------------------------------------------------------------------- -! DESCRIPTION: -! -! Areal Weighted Interpolation module. -! -! Created by Shupeng Zhang, May 2024 -!----------------------------------------------------------------------- - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - IMPLICIT NONE - - ! ------ - type :: interp_arealweighted_type - - type(grid_type) :: grid - - type(grid_list_type), allocatable :: glist (:) - - integer :: npset - integer, allocatable :: nsect(:) - type(pointer_int32_2d), allocatable :: address (:) - type(pointer_real8_1d), allocatable :: areasect(:) ! intersection area - - logical :: has_missing_value = .false. - real(r8) :: missing_value - real(r8), allocatable :: areapset(:) - type(block_data_real8_2d) :: areagrid - - CONTAINS - - procedure, PUBLIC :: build => interp_arealweighted_build - procedure, PUBLIC :: set_missing_value => interp_arealweighted_set_missing_value - - ! 1) from pixelset to grid - procedure, PRIVATE :: pset2grid_2d => interp_arealweighted_pset2grid_2d - procedure, PRIVATE :: pset2grid_3d => interp_arealweighted_pset2grid_3d - procedure, PRIVATE :: pset2grid_4d => interp_arealweighted_pset2grid_4d - generic, PUBLIC :: pset2grid => pset2grid_2d, pset2grid_3d, pset2grid_4d - - procedure, PUBLIC :: pset2grid_split => interp_arealweighted_pset2grid_split_3d - - procedure, PUBLIC :: get_sumarea => interp_arealweighted_get_sumarea - - ! 2) from grid to pixelset - procedure, PRIVATE :: grid2pset_2d => interp_arealweighted_grid2pset_2d - procedure, PRIVATE :: grid2pset_3d => interp_arealweighted_grid2pset_3d - generic, PUBLIC :: grid2pset => grid2pset_2d, grid2pset_3d - - procedure, PUBLIC :: grid2pset_dominant => interp_arealweighted_dominant_2d - - ! 
3) between grid and intersections - procedure, PUBLIC :: grid2sect => interp_arealweighted_grid2sect - procedure, PUBLIC :: sect2grid => interp_arealweighted_sect2grid - - final :: interp_arealweighted_free_mem - - END type interp_arealweighted_type - -!----------------------- -CONTAINS - - !------------------------------------------ - SUBROUTINE interp_arealweighted_build (this, fgrid, pixelset) - - USE MOD_Precision - USE MOD_Namelist - USE MOD_Block - USE MOD_Pixel - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_Mesh - USE MOD_Utils - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(grid_type), intent(in) :: fgrid - type(pixelset_type), intent(in) :: pixelset - - ! Local variables - type(pointer_real8_1d), allocatable :: afrac(:) - type(grid_list_type), allocatable :: gfrom(:) - type(pointer_int32_1d), allocatable :: list_lat(:) - integer, allocatable :: ng_lat(:) - integer, allocatable :: ys(:), yn(:), xw(:), xe(:) - integer, allocatable :: xlist(:), ylist(:) - integer, allocatable :: ipt(:) - logical, allocatable :: msk(:) - - integer :: ie, iset - integer :: ng, ig, ng_all, iloc - integer :: npxl, ipxl, ilat, ilon - integer :: iworker, iproc, idest, isrc, nrecv - integer :: rmesg(2), smesg(2) - integer :: iy, ix, xblk, yblk, xloc, yloc - real(r8) :: lat_s, lat_n, lon_w, lon_e, area - logical :: is_new - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (p_is_master) THEN - write(*,"('Making mapping from pixel set to grid: ', I7, A, I7, A)") & - fgrid%nlat, ' grids in latitude', fgrid%nlon, ' grids in longitude' - ENDIF - - IF (allocated(this%grid%xblk)) deallocate(this%grid%xblk) - IF (allocated(this%grid%yblk)) deallocate(this%grid%yblk) - IF (allocated(this%grid%xloc)) deallocate(this%grid%xloc) - IF (allocated(this%grid%yloc)) deallocate(this%grid%yloc) - allocate (this%grid%xblk (size(fgrid%xblk))) - allocate (this%grid%yblk (size(fgrid%yblk))) - allocate (this%grid%xloc (size(fgrid%xloc))) - allocate (this%grid%yloc (size(fgrid%yloc))) - - this%grid%xblk = fgrid%xblk - this%grid%yblk = fgrid%yblk - this%grid%xloc = fgrid%xloc - this%grid%yloc = fgrid%yloc - - this%npset = pixelset%nset - - IF (p_is_worker) THEN - - allocate (afrac (pixelset%nset)) - allocate (gfrom (pixelset%nset)) - - allocate (ys (pixel%nlat)) - allocate (yn (pixel%nlat)) - allocate (xw (pixel%nlon)) - allocate (xe (pixel%nlon)) - - DO ilat = 1, pixel%nlat - ys(ilat) = find_nearest_south (pixel%lat_s(ilat), fgrid%nlat, fgrid%lat_s) - yn(ilat) = find_nearest_north (pixel%lat_n(ilat), fgrid%nlat, fgrid%lat_n) - ENDDO - - DO ilon = 1, pixel%nlon - xw(ilon) = find_nearest_west (pixel%lon_w(ilon), fgrid%nlon, fgrid%lon_w) - xe(ilon) = find_nearest_east (pixel%lon_e(ilon), fgrid%nlon, fgrid%lon_e) - ENDDO - - allocate (list_lat (fgrid%nlat)) - DO iy = 1, fgrid%nlat - allocate (list_lat(iy)%val (100)) - ENDDO - - allocate (ng_lat (fgrid%nlat)) - ng_lat(:) = 0 - - DO iset = 1, pixelset%nset - - ie = pixelset%ielm(iset) - npxl = pixelset%ipxend(iset) - pixelset%ipxstt(iset) + 1 - - allocate (afrac(iset)%val (npxl)) - allocate (gfrom(iset)%ilat(npxl)) - allocate (gfrom(iset)%ilon(npxl)) - - gfrom(iset)%ng = 0 - DO ipxl = pixelset%ipxstt(iset), pixelset%ipxend(iset) - - ilat = mesh(ie)%ilat(ipxl) - ilon = mesh(ie)%ilon(ipxl) - - DO iy = ys(ilat), yn(ilat), fgrid%yinc - - lat_s = max(fgrid%lat_s(iy), pixel%lat_s(ilat)) - lat_n = min(fgrid%lat_n(iy), pixel%lat_n(ilat)) - - IF ((lat_n-lat_s) < 1.0e-6_r8) THEN - CYCLE - ENDIF - - ix = xw(ilon) - DO 
WHILE (.true.) - - IF (ix == xw(ilon)) THEN - lon_w = pixel%lon_w(ilon) - ELSE - lon_w = fgrid%lon_w(ix) - ENDIF - - IF (ix == xe(ilon)) THEN - lon_e = pixel%lon_e(ilon) - ELSE - lon_e = fgrid%lon_e(ix) - ENDIF - - IF (lon_e > lon_w) THEN - IF ((lon_e-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ELSE - IF ((lon_e+360.0_r8-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ENDIF - - area = areaquad (lat_s, lat_n, lon_w, lon_e) - - CALL insert_into_sorted_list2 ( ix, iy, & - gfrom(iset)%ng, gfrom(iset)%ilon, gfrom(iset)%ilat, & - iloc, is_new) - - IF (is_new) THEN - IF (iloc < gfrom(iset)%ng) THEN - afrac(iset)%val(iloc+1:gfrom(iset)%ng) & - = afrac(iset)%val(iloc:gfrom(iset)%ng-1) - ENDIF - - afrac(iset)%val(iloc) = area - ELSE - afrac(iset)%val(iloc) = afrac(iset)%val(iloc) + area - ENDIF - - IF (gfrom(iset)%ng == size(gfrom(iset)%ilat)) THEN - CALL expand_list (gfrom(iset)%ilat, 0.2_r8) - CALL expand_list (gfrom(iset)%ilon, 0.2_r8) - CALL expand_list (afrac(iset)%val, 0.2_r8) - ENDIF - - CALL insert_into_sorted_list1 ( & - ix, ng_lat(iy), list_lat(iy)%val, iloc) - - IF (ng_lat(iy) == size(list_lat(iy)%val)) THEN - CALL expand_list (list_lat(iy)%val, 0.2_r8) - ENDIF - - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - ENDDO - ENDDO - - ENDDO - ENDDO - - deallocate (ys) - deallocate (yn) - deallocate (xw) - deallocate (xe) - - ng_all = sum(ng_lat) - allocate (xlist(ng_all)) - allocate (ylist(ng_all)) - - ig = 0 - DO iy = 1, fgrid%nlat - IF (ng_lat(iy) > 0) THEN - DO ix = 1, ng_lat(iy) - ig = ig + 1 - xlist(ig) = list_lat(iy)%val(ix) - ylist(ig) = iy - ENDDO - ENDIF - ENDDO - - deallocate (ng_lat) - DO iy = 1, fgrid%nlat - deallocate (list_lat(iy)%val) - ENDDO - deallocate (list_lat) - -#ifdef USEMPI - allocate (ipt (ng_all)) - allocate (msk (ng_all)) - DO ig = 1, ng_all - xblk = fgrid%xblk(xlist(ig)) - yblk = fgrid%yblk(ylist(ig)) - ipt(ig) = gblock%pio(xblk,yblk) - ENDDO -#endif - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 -#ifdef USEMPI - msk = (ipt == p_address_io(iproc)) - ng = count(msk) -#else - ng = ng_all -#endif - - allocate (this%glist(iproc)%ilat (ng)) - allocate (this%glist(iproc)%ilon (ng)) - - this%glist(iproc)%ng = 0 - ENDDO - - DO ig = 1, ng_all -#ifdef USEMPI - iproc = p_itis_io(ipt(ig)) -#else - iproc = 0 -#endif - - this%glist(iproc)%ng = this%glist(iproc)%ng + 1 - - ng = this%glist(iproc)%ng - this%glist(iproc)%ilon(ng) = xlist(ig) - this%glist(iproc)%ilat(ng) = ylist(ig) - ENDDO - -#ifdef USEMPI - deallocate (ipt) - deallocate (msk) -#endif - - IF (allocated(this%address )) deallocate(this%address) - IF (allocated(this%areasect)) deallocate(this%areasect) - allocate (this%address (pixelset%nset)) - allocate (this%areasect (pixelset%nset)) - - allocate (this%nsect (pixelset%nset)) - - DO iset = 1, pixelset%nset - - ng = gfrom(iset)%ng - - this%nsect(iset) = ng - - allocate (this%address(iset)%val (2,ng)) - allocate (this%areasect(iset)%val (ng)) - - this%areasect(iset)%val = afrac(iset)%val(1:ng) - - IF (pixelset%has_shared) THEN - this%areasect(iset)%val = this%areasect(iset)%val * pixelset%pctshared(iset) - ENDIF - - DO ig = 1, gfrom(iset)%ng - ilon = gfrom(iset)%ilon(ig) - ilat = gfrom(iset)%ilat(ig) - xblk = fgrid%xblk(ilon) - yblk = fgrid%yblk(ilat) - -#ifdef USEMPI - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - - this%address(iset)%val(1,ig) = iproc - 
this%address(iset)%val(2,ig) = find_in_sorted_list2 ( & - ilon, ilat, this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ENDDO - ENDDO - - deallocate (xlist) - deallocate (ylist) - - DO iset = 1, pixelset%nset - deallocate (afrac(iset)%val ) - deallocate (gfrom(iset)%ilon) - deallocate (gfrom(iset)%ilat) - ENDDO - - deallocate (afrac) - deallocate (gfrom) - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - idest = p_address_io(iproc) - smesg = (/p_iam_glb, this%glist(iproc)%ng/) - - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iproc)%ng > 0) THEN - CALL mpi_send (this%glist(iproc)%ilon, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iproc)%ilat, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - -#ifdef USEMPI - IF (p_is_io) THEN - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_worker-1)) - - DO iworker = 0, p_np_worker-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_worker(isrc) - - this%glist(iproc)%ng = nrecv - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - ENDDO - ENDIF -#endif - - IF (p_is_worker) THEN - this%areapset(:) = 0. - DO iset = 1, this%npset - IF (this%nsect(iset) > 0) THEN - this%areapset(iset) = sum(this%areasect(iset)%val) - ENDIF - ENDDO - ENDIF - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - END SUBROUTINE interp_arealweighted_build - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_set_missing_value (this, gdata, missing_value, pmask) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(in) :: missing_value - logical, intent(inout) :: pmask(:) - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - this%has_missing_value = .true. 
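! NOTE (illustrative usage sketch, an assumption rather than part of this file):
! the type interp_arealweighted_type declared above is typically built once for a
! (grid, pixel set) pair and then used to interpolate in either direction.
! Variable names below ("m_g2p", "grid_forc", "landpatch", "gdata", "pdata") are
! placeholders for this sketch only:
!
!    type(interp_arealweighted_type) :: m_g2p
!    type(block_data_real8_2d)       :: gdata
!    real(r8), allocatable           :: pdata(:)
!
!    CALL m_g2p%build (grid_forc, landpatch)           ! intersect pixel set with grid
!    CALL m_g2p%grid2pset (gdata, pdata)               ! area-weighted grid -> pixel set
!    CALL m_g2p%pset2grid (pdata, gdata, spv = spval)  ! pixel set -> grid, skipping spval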
- this%missing_value = missing_value - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - IF (this%npset > 0) THEN - allocate (this%areapset (this%npset)) - ENDIF - - DO iset = 1, this%npset - - pmask(iset) = .false. - this%areapset(iset) = 0. - - IF (this%nsect(iset) > 0) THEN - DO ig = 1, this%nsect(iset) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (pbuff(iproc)%val(iloc) /= missing_value) THEN - pmask(iset) = .true. - this%areapset(iset) = this%areapset(iset) + this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_arealweighted_set_missing_value - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_pset2grid_2d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - real(r8), intent(in) :: pdata(:) - type(block_data_real8_2d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:) = spv - ELSE - pbuff(iproc)%val(:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - - IF (present(spv)) THEN - IF (pdata(iset) == spv) CYCLE - ENDIF - - IF (present(msk)) THEN - IF (.not. 
msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%areasect(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (present(spv)) THEN - IF (pbuff(iproc)%val(iloc) /= spv) THEN - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%areasect(iset)%val(ig) - ELSE - pbuff(iproc)%val(iloc) = & - pdata(iset) * this%areasect(iset)%val(ig) - ENDIF - ELSE - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - IF (present(spv)) THEN - IF (gbuff(ig) /= spv) THEN - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - IF (gdata%blk(xblk,yblk)%val(xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(xloc,yloc) = & - gdata%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ELSE - gdata%blk(xblk,yblk)%val(xloc,yloc) = gbuff(ig) - ENDIF - ENDIF - ELSE - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gdata%blk(xblk,yblk)%val(xloc,yloc) = & - gdata%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ENDIF - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - - END SUBROUTINE interp_arealweighted_pset2grid_2d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_pset2grid_3d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - real(r8), intent(in) :: pdata(:,:) - type(block_data_real8_3d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset - integer :: xblk, yblk, xloc, yloc - integer :: lb1, ub1, i1 - - real(r8), allocatable :: gbuff(:,:) - type(pointer_real8_2d), allocatable :: pbuff(:) - - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - lb1 = lbound(pdata,1) - ub1 = ubound(pdata,1) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (lb1:ub1, this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:,:) = spv - ELSE - pbuff(iproc)%val(:,:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (present(msk)) THEN - IF (.not. 
msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%areasect(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - DO i1 = lb1, ub1 - IF (present(spv)) THEN - IF (pdata(i1,iset) /= spv) THEN - IF (pbuff(iproc)%val(i1,iloc) /= spv) THEN - pbuff(iproc)%val(i1,iloc) = pbuff(iproc)%val(i1,iloc) & - + pdata(i1,iset) * this%areasect(iset)%val(ig) - ELSE - pbuff(iproc)%val(i1,iloc) = & - pdata(i1,iset) * this%areasect(iset)%val(ig) - ENDIF - ENDIF - ELSE - pbuff(iproc)%val(i1,iloc) = pbuff(iproc)%val(i1,iloc) & - + pdata(i1,iset) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, & - (ub1-lb1+1) * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - lb1 = gdata%lb1 - ub1 = gdata%ub1 - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (lb1:ub1, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, & - (ub1-lb1+1) * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - DO i1 = lb1, ub1 - IF (present(spv)) THEN - IF (gbuff(i1,ig) /= spv) THEN - IF (gdata%blk(xblk,yblk)%val(i1,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) + gbuff(i1,ig) - ELSE - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = gbuff(i1,ig) - ENDIF - ENDIF - ELSE - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) + gbuff(i1,ig) - ENDIF - ENDDO - ENDDO - - deallocate (gbuff) - ENDIF - - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE interp_arealweighted_pset2grid_3d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_pset2grid_4d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - real(r8), intent(in) :: pdata(:,:,:) - type(block_data_real8_4d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset - integer :: xblk, yblk, xloc, yloc - integer :: lb1, ub1, i1, ndim1, lb2, ub2, i2, ndim2 - - real(r8), allocatable :: gbuff(:,:,:) - type(pointer_real8_3d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - lb1 = lbound(pdata,1) - ub1 = ubound(pdata,1) - ndim1 = ub1 - lb1 + 1 - - lb2 = lbound(pdata,2) - ub2 = ubound(pdata,2) - ndim2 = ub2 - lb2 + 1 - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (lb1:ub1, lb2:ub2, this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:,:,:) = spv - ELSE - pbuff(iproc)%val(:,:,:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (present(msk)) THEN - IF (.not. msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%areasect(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - DO i1 = lb1, ub1 - DO i2 = lb2, ub2 - IF (present(spv)) THEN - IF (pdata(i1,i2,iset) /= spv) THEN - IF (pbuff(iproc)%val(i1,i2,iloc) /= spv) THEN - pbuff(iproc)%val(i1,i2,iloc) = pbuff(iproc)%val(i1,i2,iloc) & - + pdata(i1,i2,iset) * this%areasect(iset)%val(ig) - ELSE - pbuff(iproc)%val(i1,i2,iloc) = & - pdata(i1,i2,iset) * this%areasect(iset)%val(ig) - ENDIF - ENDIF - ELSE - pbuff(iproc)%val(i1,i2,iloc) = pbuff(iproc)%val(i1,i2,iloc) & - + pdata(i1,i2,iset) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDDO - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, ndim1 * ndim2 * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - lb1 = gdata%lb1 - ub1 = gdata%ub1 - ndim1 = ub1 - lb1 + 1 - - lb2 = gdata%lb2 - ub2 = gdata%ub2 - ndim2 = ub2 - lb2 + 1 - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (lb1:ub1, lb2:ub2, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, ndim1 * ndim2 * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - DO i1 = lb1, ub1 - DO i2 = lb2, ub2 - IF (present(spv)) THEN - IF (gbuff(i1,i2,ig) /= spv) THEN - IF (gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) + gbuff(i1,i2,ig) - ELSE - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = gbuff(i1,i2,ig) - ENDIF - ENDIF - ELSE - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) + gbuff(i1,i2,ig) - ENDIF - ENDDO - ENDDO - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE interp_arealweighted_pset2grid_4d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_pset2grid_split_3d (this, pdata, settyp, typidx, gdata, spv) - - USE MOD_Precision - USE 
MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - real(r8), intent(in) :: pdata (:) - integer , intent(in) :: settyp(:) - integer , intent(in) :: typidx(:) - type(block_data_real8_3d), intent(inout) :: gdata - - real(r8), intent(in) :: spv - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset, ityp, ntyps - integer :: xblk, yblk, xloc, yloc - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff (:) - - IF (p_is_worker) THEN - allocate (pbuff (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - ENDIF - ENDDO - ENDIF - - IF (p_is_io) THEN - CALL flush_block_data (gdata, spv) - ENDIF - - ntyps = size(typidx) - - DO ityp = 1, ntyps - - IF (p_is_worker) THEN - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - pbuff(iproc)%val(:) = spv - ENDIF - ENDDO - - DO iset = 1, this%npset - IF ((settyp(iset) == typidx(ityp)) .and. (pdata(iset) /= spv)) THEN - DO ig = 1, size(this%areasect(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (pbuff(iproc)%val(iloc) /= spv) THEN - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%areasect(iset)%val(ig) - ELSE - pbuff(iproc)%val(iloc) = & - pdata(iset) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDIF - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - IF (gbuff(ig) /= spv) THEN - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - IF (gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) + gbuff(ig) - ELSE - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) = gbuff(ig) - ENDIF - ENDIF - ENDDO - - deallocate (gbuff) - ENDIF - - ENDDO - - ENDIF - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - ENDDO - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE interp_arealweighted_pset2grid_split_3d - - ! ------------------------------ - SUBROUTINE interp_arealweighted_get_sumarea (this, filter, sumarea) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - logical, intent(in) :: filter(:) - type(block_data_real8_2d), intent(inout) :: sumarea - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - pbuff(iproc)%val(:) = 0.0 - ENDIF - ENDDO - - DO iset = 1, this%npset - - IF (.not. filter(iset)) CYCLE - - DO ig = 1, size(this%areasect(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) + this%areasect(iset)%val(ig) - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - CALL flush_block_data (sumarea, 0.0_r8) - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - sumarea%blk(xblk,yblk)%val(xloc,yloc) = & - sumarea%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - - END SUBROUTINE interp_arealweighted_get_sumarea - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_grid2pset_2d (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(out) :: pdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - pdata(:) = spval - - DO iset = 1, this%npset - DO ig = 1, this%nsect(iset) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (this%has_missing_value) THEN - IF (pbuff(iproc)%val(iloc) == this%missing_value) THEN - CYCLE - ENDIF - ENDIF - - IF (pdata(iset) == spval) THEN - pdata(iset) = pbuff(iproc)%val(iloc) * this%areasect(iset)%val(ig) - ELSE - pdata(iset) = pdata(iset) + pbuff(iproc)%val(iloc) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - - IF (this%areapset(iset) > 0.) THEN - pdata(iset) = pdata(iset) / this%areapset(iset) - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_arealweighted_grid2pset_2d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_grid2pset_3d (this, gdata, ndim1, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(block_data_real8_3d), intent(in) :: gdata - integer, intent(in) :: ndim1 - real(r8), intent(out) :: pdata(:,:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset, i - - real(r8), allocatable :: gbuff(:,:) - type(pointer_real8_2d), allocatable :: pbuff(:) - - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (ndim1, this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(:,ig) = gdata%blk(xblk,yblk)%val(:,xloc,yloc) - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, ndim1 * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (ndim1, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, ndim1 * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - pdata(:,:) = spval - - DO iset = 1, this%npset - DO ig = 1, this%nsect(iset) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - DO i = 1, ndim1 - IF (this%has_missing_value) THEN - IF (pbuff(iproc)%val(i,iloc) == this%missing_value) THEN - CYCLE - ENDIF - ENDIF - - IF (pdata(i,iset) == spval) THEN - pdata(i,iset) = & - + pbuff(iproc)%val(i,iloc) * this%areasect(iset)%val(ig) - ELSE - pdata(i,iset) = pdata(i,iset) & - + pbuff(iproc)%val(i,iloc) * this%areasect(iset)%val(ig) - ENDIF - ENDDO - ENDDO - - IF (this%areapset(iset) > 0.) THEN - pdata(:,iset) = pdata(:,iset) / this%areapset(iset) - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_arealweighted_grid2pset_3d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_dominant_2d (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(block_data_int32_2d), intent(in) :: gdata - integer, intent(out) :: pdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - integer, allocatable :: gbuff(:) - type(pointer_int32_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (this%nsect(iset) > 0) THEN - ig = maxloc(this%areasect(iset)%val, dim=1) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - pdata(iset) = pbuff(iproc)%val(iloc) - ELSE - pdata(iset) = -9999 - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_arealweighted_dominant_2d - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_grid2sect (this, gdata, sdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - type(pointer_real8_1d), intent(out) :: sdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (this%nsect(iset) > 0) THEN - sdata(iset)%val = 0._r8 - DO ig = 1, this%nsect(iset) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - sdata(iset)%val(ig) = pbuff(iproc)%val(iloc) - ENDDO - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_arealweighted_grid2sect - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_sect2grid (this, sdata, gdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_arealweighted_type) :: this - - type(pointer_real8_1d), intent(in) :: sdata(:) - type(block_data_real8_2d), intent(inout) :: gdata - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - pbuff(iproc)%val(:) = 0.0 - ENDIF - ENDDO - - DO iset = 1, this%npset - - DO ig = 1, this%nsect(iset) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + sdata(iset)%val(ig) * this%areasect(iset)%val(ig) - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - CALL flush_block_data (gdata, 0.0_r8) - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gdata%blk(xblk,yblk)%val(xloc,yloc) = & - gdata%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE interp_arealweighted_sect2grid - - !----------------------------------------------------- - SUBROUTINE interp_arealweighted_free_mem (this) - - USE MOD_SPMD_Task - IMPLICIT NONE - - type (interp_arealweighted_type) :: this - - ! 
Local variables - integer :: iproc, iset - - IF (allocated (this%grid%xblk)) deallocate (this%grid%xblk) - IF (allocated (this%grid%yblk)) deallocate (this%grid%yblk) - - IF (allocated (this%grid%xloc)) deallocate (this%grid%xloc) - IF (allocated (this%grid%yloc)) deallocate (this%grid%yloc) - - IF (p_is_io) THEN - IF (allocated(this%glist)) THEN - DO iproc = 0, p_np_worker-1 - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - ENDIF - - IF (p_is_worker) THEN - IF (allocated(this%glist)) THEN - DO iproc = 0, p_np_io-1 - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - - IF (allocated(this%address)) THEN - DO iset = 1, this%npset - IF (allocated(this%address(iset)%val)) THEN - deallocate (this%address(iset)%val) - ENDIF - ENDDO - - deallocate (this%address) - ENDIF - - IF (allocated(this%areasect)) THEN - DO iset = 1, this%npset - IF (allocated(this%areasect(iset)%val)) THEN - deallocate (this%areasect(iset)%val) - ENDIF - ENDDO - - deallocate (this%areasect) - ENDIF - ENDIF - - END SUBROUTINE interp_arealweighted_free_mem - -END MODULE MOD_InterpArealWeight diff --git a/share/MOD_InterpBilinear.F90 b/share/MOD_InterpBilinear.F90 deleted file mode 100644 index 95c73dae..00000000 --- a/share/MOD_InterpBilinear.F90 +++ /dev/null @@ -1,1037 +0,0 @@ -#include - -MODULE MOD_InterpBilinear - -!----------------------------------------------------------------------- -! DESCRIPTION: -! -! Bilinear Interpolation module. -! -! Created by Shupeng Zhang, April 2024 -!----------------------------------------------------------------------- - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - IMPLICIT NONE - - !------ - type :: interp_bilinear_type - - type(grid_type) :: grid - - type(grid_list_type), allocatable :: glist (:) - - integer :: npset - integer, allocatable :: address (:,:,:) - real(r8), allocatable :: weight (:,:) - real(r8), allocatable :: areacorner(:,:) - - logical :: has_missing_value = .false. - real(r8) :: missing_value - type(block_data_real8_2d) :: areagrid - - CONTAINS - - procedure, PUBLIC :: build => interp_bilinear_build - procedure, PUBLIC :: set_missing_value => interp_bilinear_set_missing_value - - procedure, PUBLIC :: grid2pset => interp_bilinear_grid2pset_2d - procedure, PUBLIC :: grid2corner => interp_bilinear_grid2corner_2d - procedure, PUBLIC :: corner2grid => interp_bilinear_corner2grid_2d - procedure, PUBLIC :: corner2pset => interp_bilinear_corner2pset_2d - - final :: interp_bilinear_free_mem - - END type interp_bilinear_type - -!------------------------------------------------------------------- -CONTAINS - - !------------------------------------------ - SUBROUTINE interp_bilinear_build (this, fgrid, pixelset, gfilter, missing_value, pfilter) - - USE MOD_Precision - USE MOD_Namelist - USE MOD_Block - USE MOD_Pixel - USE MOD_Grid - USE MOD_DataType - USE MOD_Mesh - USE MOD_Pixelset - USE MOD_Utils - USE MOD_SPMD_Task - USE MOD_Vars_Global, only: pi - IMPLICIT NONE - - class (interp_bilinear_type) :: this - - type(grid_type), intent(in) :: fgrid - type(pixelset_type), intent(in) :: pixelset - - type(block_data_real8_2d), intent(in), optional :: gfilter - real(r8), intent(in), optional :: missing_value - logical, intent(inout), optional :: pfilter(:) - - - ! 
Local variables - integer, allocatable :: ys(:), yn(:), xw(:), xe(:) - integer, allocatable :: xlist(:), ylist(:), ipt(:) - - real(r8), allocatable :: rlon_pset(:), rlat_pset(:) - real(r8), allocatable :: nwgt(:), swgt(:), wwgt(:), ewgt(:) - - logical, allocatable :: msk(:) - - integer :: iset, ilat, ilon, iwest, ieast - integer :: nglist, iloc, ng, ig - integer :: iworker, iproc, iio, idest, isrc, nrecv - integer :: rmesg(2), smesg(2) - integer :: iy, ix, xblk, yblk, xloc, yloc - - real(r8) :: lon, lonw, lone, latn, lats - real(r8) :: distn, dists, distw, diste, diffw, diffe, sumwt - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (p_is_master) THEN - write(*,*) - write(*,"(A, I7, A, I7, A)") & - 'Building bilinear interpolation from grid to pixel set: ', & - fgrid%nlat, ' grids in latitude', fgrid%nlon, ' grids in longitude' - write(*,*) - ENDIF - - this%grid%nlat = fgrid%nlat - this%grid%nlon = fgrid%nlon - - allocate (this%grid%xblk (this%grid%nlon)); this%grid%xblk = fgrid%xblk - allocate (this%grid%yblk (this%grid%nlat)); this%grid%yblk = fgrid%yblk - allocate (this%grid%xloc (this%grid%nlon)); this%grid%xloc = fgrid%xloc - allocate (this%grid%yloc (this%grid%nlat)); this%grid%yloc = fgrid%yloc - - IF (p_is_worker) THEN - - allocate (this%grid%lat_s(this%grid%nlat)); this%grid%lat_s = fgrid%lat_s - allocate (this%grid%lat_n(this%grid%nlat)); this%grid%lat_n = fgrid%lat_n - allocate (this%grid%lon_w(this%grid%nlon)); this%grid%lon_w = fgrid%lon_w - allocate (this%grid%lon_e(this%grid%nlon)); this%grid%lon_e = fgrid%lon_e - allocate (this%grid%rlon (this%grid%nlon)); CALL this%grid%set_rlon () - allocate (this%grid%rlat (this%grid%nlat)); CALL this%grid%set_rlat () - - this%npset = pixelset%nset - - allocate (yn (this%npset)) - allocate (ys (this%npset)) - allocate (xw (this%npset)) - allocate (xe (this%npset)) - allocate (rlon_pset (this%npset)) - allocate (rlat_pset (this%npset)) - - CALL pixelset%get_lonlat_radian (rlon_pset, rlat_pset) - - allocate (xlist(4*this%npset)) - allocate (ylist(4*this%npset)) - - allocate (nwgt (this%npset)) - allocate (swgt (this%npset)) - allocate (wwgt (this%npset)) - allocate (ewgt (this%npset)) - - nglist = 0 - - DO iset = 1, this%npset - - IF (this%grid%rlat(1) > this%grid%rlat(this%grid%nlat)) THEN - ! from north to south - ilat = 1 - DO WHILE ((rlat_pset(iset) < this%grid%rlat(ilat)) .and. (ilat < this%grid%nlat)) - ilat = ilat + 1 - ENDDO - - IF (rlat_pset(iset) >= this%grid%rlat(ilat)) THEN - yn(iset) = max(ilat-1,1) - ys(iset) = ilat - ELSE - yn(iset) = this%grid%nlat - ys(iset) = this%grid%nlat - ENDIF - ELSE - ! from south to north - ilat = this%grid%nlat - DO WHILE ((rlat_pset(iset) < this%grid%rlat(ilat)) .and. 
(ilat > 1)) - ilat = ilat - 1 - ENDDO - - IF (rlat_pset(iset) >= this%grid%rlat(ilat)) THEN - yn(iset) = min(ilat+1,this%grid%nlat) - ys(iset) = ilat - ELSE - yn(iset) = 1 - ys(iset) = 1 - ENDIF - ENDIF - - IF (yn(iset) /= ys(iset)) THEN - latn = this%grid%rlat(yn(iset)) - lats = this%grid%rlat(ys(iset)) - distn = arclen(rlat_pset(iset), rlon_pset(iset), latn, rlon_pset(iset)) - dists = arclen(rlat_pset(iset), rlon_pset(iset), lats, rlon_pset(iset)) - nwgt(iset) = dists/(dists+distn) - swgt(iset) = distn/(dists+distn) - ELSE - nwgt(iset) = 1.0 - swgt(iset) = 0.0 - ENDIF - - - lon = rlon_pset(iset)*180.0/pi - CALL normalize_longitude (lon) - - DO iwest = 1, this%grid%nlon - lonw = this%grid%rlon(iwest) *180.0/pi - CALL normalize_longitude (lonw) - - ieast = mod(iwest,this%grid%nlon) + 1 - lone = this%grid%rlon(ieast)*180.0/pi - CALL normalize_longitude (lone) - - IF (lon_between_floor(lon, lonw, lone)) EXIT - ENDDO - - xw(iset) = iwest - xe(iset) = ieast - - ! for the case grid does not cover [-180,180) - IF ((iwest == this%grid%nlon) .and. (this%grid%nlon > 1)) THEN - IF (lon_between_floor( & - this%grid%lon_e(this%grid%nlon), lonw, this%grid%lon_w(1))) THEN - - diffw = lon - lonw; IF (diffw < 0) diffw = diffw + 360.0 - diffe = lone - lon; IF (diffe < 0) diffe = diffe + 360.0 - - IF (diffw > diffe) THEN - xw(iset) = ieast - xe(iset) = ieast - ELSE - xw(iset) = iwest - xe(iset) = iwest - ENDIF - - ENDIF - ENDIF - - IF (xw(iset) /= xe(iset)) THEN - lonw = this%grid%rlon(xw(iset)) - lone = this%grid%rlon(xe(iset)) - distw = arclen(rlat_pset(iset), rlon_pset(iset), rlat_pset(iset), lonw) - diste = arclen(rlat_pset(iset), rlon_pset(iset), rlat_pset(iset), lone) - wwgt(iset) = diste/(distw+diste) - ewgt(iset) = distw/(distw+diste) - ELSE - wwgt(iset) = 1.0 - ewgt(iset) = 0.0 - ENDIF - - CALL insert_into_sorted_list2 ( xw(iset), yn(iset), nglist, xlist, ylist, iloc) - CALL insert_into_sorted_list2 ( xe(iset), yn(iset), nglist, xlist, ylist, iloc) - CALL insert_into_sorted_list2 ( xw(iset), ys(iset), nglist, xlist, ylist, iloc) - CALL insert_into_sorted_list2 ( xe(iset), ys(iset), nglist, xlist, ylist, iloc) - - ENDDO - -#ifdef USEMPI - allocate (ipt (nglist)) - allocate (msk (nglist)) - DO ig = 1, nglist - xblk = this%grid%xblk(xlist(ig)) - yblk = this%grid%yblk(ylist(ig)) - ipt(ig) = gblock%pio(xblk,yblk) - ENDDO -#endif - - allocate (this%glist (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 -#ifdef USEMPI - msk = (ipt == p_address_io(iproc)) - ng = count(msk) -#else - ng = nglist -#endif - - IF (ng > 0) THEN - allocate (this%glist(iproc)%ilat (ng)) - allocate (this%glist(iproc)%ilon (ng)) - ENDIF - - this%glist(iproc)%ng = 0 - ENDDO - - DO ig = 1, nglist -#ifdef USEMPI - iproc = p_itis_io(ipt(ig)) -#else - iproc = 0 -#endif - this%glist(iproc)%ng = this%glist(iproc)%ng + 1 - - ng = this%glist(iproc)%ng - this%glist(iproc)%ilon(ng) = xlist(ig) - this%glist(iproc)%ilat(ng) = ylist(ig) - ENDDO - - deallocate (xlist) - deallocate (ylist) - -#ifdef USEMPI - deallocate (ipt) - deallocate (msk) -#endif - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - idest = p_address_io(iproc) - smesg = (/p_iam_glb, this%glist(iproc)%ng/) - - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iproc)%ng > 0) THEN - CALL mpi_send (this%glist(iproc)%ilon, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iproc)%ilat, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, 
p_err) - ENDIF - ENDDO - ENDIF - - IF (p_is_io) THEN - - allocate (this%glist (0:p_np_worker-1)) - - DO iworker = 0, p_np_worker-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_worker(isrc) - - this%glist(iproc)%ng = nrecv - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - - ENDDO - - ENDIF - - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (present(missing_value)) THEN - - IF (p_is_io) THEN - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (msk (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - msk(ig) = gfilter%blk(xblk,yblk)%val(xloc,yloc) /= missing_value - ENDDO - - IF (any(.not. msk)) THEN - - this%glist(iproc)%ng = count(msk) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (xlist(this%glist(iproc)%ng)) - allocate (ylist(this%glist(iproc)%ng)) - xlist = pack(this%glist(iproc)%ilon, mask=msk) - ylist = pack(this%glist(iproc)%ilat, mask=msk) - ENDIF - - deallocate (this%glist(iproc)%ilon) - deallocate (this%glist(iproc)%ilat) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (this%glist(iproc)%ilon(this%glist(iproc)%ng)) - allocate (this%glist(iproc)%ilat(this%glist(iproc)%ng)) - this%glist(iproc)%ilon = xlist - this%glist(iproc)%ilat = ylist - ENDIF - - IF (allocated(xlist)) deallocate(xlist) - IF (allocated(ylist)) deallocate(ylist) - ENDIF - - deallocate(msk) - ENDIF - ENDDO - ENDIF - -#ifdef USEMPI - IF (p_is_io) THEN - DO iworker = 0, p_np_worker-1 - - idest = p_address_worker(iworker) - smesg = (/p_iam_glb, this%glist(iworker)%ng/) - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iworker)%ng > 0) THEN - CALL mpi_send (this%glist(iworker)%ilon, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iworker)%ilat, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO - ENDIF - - IF (p_is_worker) THEN - DO iio = 0, p_np_io-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_io(isrc) - - this%glist(iproc)%ng = nrecv - - IF (allocated(this%glist(iproc)%ilon)) deallocate(this%glist(iproc)%ilon) - IF (allocated(this%glist(iproc)%ilat)) deallocate(this%glist(iproc)%ilat) - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - ENDDO - ENDIF - - CALL mpi_barrier (p_comm_glb, p_err) -#endif - ENDIF - - IF (p_is_worker) THEN - - allocate (this%address (2,4,this%npset)) - allocate (this%weight (4,this%npset)) - - DO iset = 1, pixelset%nset - - ! 
northwest grid -#ifdef USEMPI - ix = xw(iset); xblk = this%grid%xblk(ix) - iy = yn(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,1,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,1,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,1,iset) = -1 - ENDIF - - IF (this%address(2,1,iset) > 0) THEN - this%weight(1,iset) = nwgt(iset) * wwgt(iset) - ELSE - this%weight(1,iset) = 0 - ENDIF - - ! northeast grid -#ifdef USEMPI - ix = xe(iset); xblk = this%grid%xblk(ix) - iy = yn(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,2,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,2,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,2,iset) = -1 - ENDIF - - IF (this%address(2,2,iset) > 0) THEN - this%weight(2,iset) = nwgt(iset) * ewgt(iset) - ELSE - this%weight(2,iset) = 0 - ENDIF - - ! southwest -#ifdef USEMPI - ix = xw(iset); xblk = this%grid%xblk(ix) - iy = ys(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,3,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,3,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,3,iset) = -1 - ENDIF - - IF (this%address(2,3,iset) > 0) THEN - this%weight(3,iset) = swgt(iset) * wwgt(iset) - ELSE - this%weight(3,iset) = 0 - ENDIF - - ! southeast -#ifdef USEMPI - ix = xe(iset); xblk = this%grid%xblk(ix) - iy = ys(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,4,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,4,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,4,iset) = -1 - ENDIF - - IF (this%address(2,4,iset) > 0) THEN - this%weight(4,iset) = swgt(iset) * ewgt(iset) - ELSE - this%weight(4,iset) = 0 - ENDIF - - sumwt = sum(this%weight(:,iset)) - IF (sumwt > 0) THEN - this%weight(:,iset) = this%weight(:,iset) / sumwt - ENDIF - - ENDDO - - IF (present(pfilter)) THEN - pfilter = sum(this%weight, dim=1) > 0 - ENDIF - - ENDIF - - IF (allocated(this%grid%lat_s)) deallocate(this%grid%lat_s) - IF (allocated(this%grid%lat_n)) deallocate(this%grid%lat_n) - IF (allocated(this%grid%lon_w)) deallocate(this%grid%lon_w) - IF (allocated(this%grid%lon_e)) deallocate(this%grid%lon_e) - IF (allocated(this%grid%rlon )) deallocate(this%grid%rlon ) - IF (allocated(this%grid%rlat )) deallocate(this%grid%rlat ) - - IF (allocated(yn)) deallocate(yn) - IF (allocated(ys)) deallocate(ys) - IF (allocated(xw)) deallocate(xw) - IF (allocated(xe)) deallocate(xe) - - IF (allocated(rlon_pset)) deallocate(rlon_pset) - IF (allocated(rlat_pset)) deallocate(rlat_pset) - - IF (allocated(nwgt)) deallocate(nwgt) - IF (allocated(swgt)) deallocate(swgt) - IF (allocated(wwgt)) deallocate(wwgt) - IF (allocated(ewgt)) deallocate(ewgt) - - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - END SUBROUTINE interp_bilinear_build - - !----------------------------------------------------- - SUBROUTINE interp_bilinear_set_missing_value (this, gdata, missing_value, pmask) - - USE MOD_Precision - USE MOD_Grid - USE 
MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (interp_bilinear_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(in) :: missing_value - logical, intent(inout) :: pmask(:) - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - - IF (p_is_io) THEN - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (msk (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - - IF (any(.not. msk)) THEN - - this%glist(iproc)%ng = count(msk) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (xlist(this%glist(iproc)%ng)) - allocate (ylist(this%glist(iproc)%ng)) - xlist = pack(this%glist(iproc)%ilon, mask=msk) - ylist = pack(this%glist(iproc)%ilat, mask=msk) - ENDIF - - deallocate (this%glist(iproc)%ilon) - deallocate (this%glist(iproc)%ilat) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (this%glist(iproc)%ilon(this%glist(iproc)%ng)) - allocate (this%glist(iproc)%ilat(this%glist(iproc)%ng)) - this%glist(iproc)%ilon = xlist - this%glist(iproc)%ilat = ylist - ENDIF - - IF (allocated(xlist)) deallocate(xlist) - IF (allocated(ylist)) deallocate(ylist) - ENDIF - - deallocate(msk) - ENDIF - ENDDO - ENDIF - -#ifdef USEMPI - IF (p_is_io) THEN - DO iworker = 0, p_np_worker-1 - - idest = p_address_worker(iworker) - smesg = (/p_iam_glb, this%glist(iworker)%ng/) - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iworker)%ng > 0) THEN - CALL mpi_send (this%glist(iworker)%ilon, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iworker)%ilat, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO - ENDIF - - IF (p_is_worker) THEN - DO iio = 0, p_np_io-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_io(isrc) - - this%glist(iproc)%ng = nrecv - - IF (allocated(this%glist(iproc)%ilon)) deallocate(this%glist(iproc)%ilon) - IF (allocated(this%glist(iproc)%ilat)) deallocate(this%glist(iproc)%ilat) - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - ENDDO - ENDIF - - CALL mpi_barrier (p_comm_glb, p_err) -#endif - ENDIF - - IF (p_is_worker) THEN - - allocate (this%address (2,4,this%npset)) - allocate (this%weight (4,this%npset)) - - DO iset = 1, pixelset%nset - - ! 
northwest grid -#ifdef USEMPI - ix = xw(iset); xblk = this%grid%xblk(ix) - iy = yn(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,1,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,1,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,1,iset) = -1 - ENDIF - - IF (this%address(2,1,iset) > 0) THEN - this%weight(1,iset) = nwgt(iset) * wwgt(iset) - ELSE - this%weight(1,iset) = 0 - ENDIF - - ! northeast grid -#ifdef USEMPI - ix = xe(iset); xblk = this%grid%xblk(ix) - iy = yn(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,2,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,2,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,2,iset) = -1 - ENDIF - - IF (this%address(2,2,iset) > 0) THEN - this%weight(2,iset) = nwgt(iset) * ewgt(iset) - ELSE - this%weight(2,iset) = 0 - ENDIF - - ! southwest -#ifdef USEMPI - ix = xw(iset); xblk = this%grid%xblk(ix) - iy = ys(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,3,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,3,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,3,iset) = -1 - ENDIF - - IF (this%address(2,3,iset) > 0) THEN - this%weight(3,iset) = swgt(iset) * wwgt(iset) - ELSE - this%weight(3,iset) = 0 - ENDIF - - ! southeast -#ifdef USEMPI - ix = xe(iset); xblk = this%grid%xblk(ix) - iy = ys(iset); yblk = this%grid%yblk(iy) - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(1,4,iset) = iproc - IF (this%glist(iproc)%ng > 0) THEN - this%address(2,4,iset) = find_in_sorted_list2 ( ix, iy, & - this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ELSE - this%address(2,4,iset) = -1 - ENDIF - - IF (this%address(2,4,iset) > 0) THEN - this%weight(4,iset) = swgt(iset) * ewgt(iset) - ELSE - this%weight(4,iset) = 0 - ENDIF - - sumwt = sum(this%weight(:,iset)) - IF (sumwt > 0) THEN - this%weight(:,iset) = this%weight(:,iset) / sumwt - ENDIF - - ENDDO - - IF (present(pfilter)) THEN - pfilter = sum(this%weight, dim=1) > 0 - ENDIF - - ENDIF - END SUBROUTINE interp_bilinear_set_missing_value - - !----------------------------------------------------- - SUBROUTINE interp_bilinear_interp_2d (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_bilinear_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(out) :: pdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - pdata(iset) = spval - DO ig = 1, 4 - iproc = this%address(1,ig,iset) - iloc = this%address(2,ig,iset) - IF (iloc > 0) THEN - IF (pdata(iset) == spval) THEN - pdata(iset) = pbuff(iproc)%val(iloc) * this%weight(ig,iset) - ELSE - pdata(iset) = pdata(iset) + pbuff(iproc)%val(iloc) * this%weight(ig,iset) - ENDIF - ENDIF - ENDDO - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_bilinear_interp_2d - - - !----------------------------------------------------- - SUBROUTINE interp_bilinear_get_corners (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (interp_bilinear_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(out) :: pdata(:,:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - pdata(:,:) = spval - DO iset = 1, this%npset - DO ig = 1, 4 - iproc = this%address(1,ig,iset) - iloc = this%address(2,ig,iset) - IF (iloc > 0) THEN - pdata(ig,iset) = pbuff(iproc)%val(iloc) - ENDIF - ENDDO - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE interp_bilinear_get_corners - - !----------------------------------------------------- - SUBROUTINE interp_bilinear_free_mem (this) - - USE MOD_SPMD_Task - IMPLICIT NONE - - type(interp_bilinear_type) :: this - - ! Local variables - integer :: iproc - - IF (allocated (this%grid%xblk)) deallocate (this%grid%xblk) - IF (allocated (this%grid%yblk)) deallocate (this%grid%yblk) - - IF (allocated (this%grid%xloc)) deallocate (this%grid%xloc) - IF (allocated (this%grid%yloc)) deallocate (this%grid%yloc) - - IF (allocated(this%glist)) THEN - DO iproc = lbound(this%glist,1), ubound(this%glist,1) - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - - IF (allocated(this%address)) deallocate (this%address) - IF (allocated(this%weight )) deallocate (this%weight ) - - END SUBROUTINE interp_bilinear_free_mem - -END MODULE MOD_InterpBilinear diff --git a/share/MOD_Mapping_Grid2Pset.F90 b/share/MOD_Mapping_Grid2Pset.F90 deleted file mode 100644 index 3f306b72..00000000 --- a/share/MOD_Mapping_Grid2Pset.F90 +++ /dev/null @@ -1,905 +0,0 @@ -#include - -MODULE MOD_Mapping_Grid2Pset - -!----------------------------------------------------------------------- -! DESCRIPTION: -! -! Mapping data types and subroutines from gridded data to vector data -! defined on pixelset. -! -! Notice that: -! 1. A mapping can be built with method mapping%build. -! 2. Area weighted mapping is carried out. -! 3. For 2D gridded data, dimensions are from [lon,lat] to [vector]. -! 4. For 3D gridded data, dimensions are from [d,lon,lat] to [d,vector]. -! -! 
Created by Shupeng Zhang, May 2023 -!----------------------------------------------------------------------- - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - IMPLICIT NONE - - !------ - type :: mapping_grid2pset_type - - type(grid_type) :: grid - integer :: npset - - type(grid_list_type), allocatable :: glist (:) - - type(pointer_int32_2d), allocatable :: address(:) - type(pointer_real8_1d), allocatable :: gweight(:) - - CONTAINS - - procedure, PUBLIC :: build => mapping_grid2pset_build - - procedure, PRIVATE :: map_aweighted_2d => map_g2p_aweighted_2d - procedure, PRIVATE :: map_aweighted_3d => map_g2p_aweighted_3d - generic, PUBLIC :: map_aweighted => map_aweighted_2d, map_aweighted_3d - - procedure, PUBLIC :: map_max_frequency_2d => map_g2p_max_frequency_2d - - final :: mapping_grid2pset_free_mem - - END type mapping_grid2pset_type - -!------------------------------------------------------------------- -CONTAINS - - !------------------------------------------ - SUBROUTINE mapping_grid2pset_build (this, fgrid, pixelset, gfilter, missing_value, pfilter) - - USE MOD_Precision - USE MOD_Namelist - USE MOD_Block - USE MOD_Pixel - USE MOD_Grid - USE MOD_DataType - USE MOD_Mesh - USE MOD_Pixelset - USE MOD_Utils - USE MOD_SPMD_Task - IMPLICIT NONE - - class (mapping_grid2pset_type) :: this - - type(grid_type), intent(in) :: fgrid - type(pixelset_type), intent(in) :: pixelset - - type(block_data_real8_2d), intent(in), optional :: gfilter - real(r8), intent(in), optional :: missing_value - logical, intent(inout), optional :: pfilter(:) - - - ! Local variables - type(pointer_real8_1d), allocatable :: afrac(:) - type(grid_list_type), allocatable :: gfrom(:) - type(pointer_int32_1d), allocatable :: list_lat(:) - integer, allocatable :: ng_lat(:) - integer, allocatable :: ys(:), yn(:), xw(:), xe(:) - integer, allocatable :: xlist(:), ylist(:) - integer, allocatable :: ipt(:) - logical, allocatable :: msk(:) - - integer :: ie, iset - integer :: ng, ig, ng_all, iloc, ng0 - integer :: npxl, ipxl, ilat, ilon - integer :: iworker, iproc, iio, idest, isrc, nrecv - integer :: rmesg(2), smesg(2) - integer :: iy, ix, xblk, yblk, xloc, yloc - real(r8) :: lat_s, lat_n, lon_w, lon_e, area - logical :: is_new - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (p_is_master) THEN - write(*,"('Making mapping from grid to pixel set: ', I7, A, I7, A)") & - fgrid%nlat, ' grids in latitude', fgrid%nlon, ' grids in longitude' - ENDIF - - IF (allocated(this%grid%xblk)) deallocate(this%grid%xblk) - IF (allocated(this%grid%yblk)) deallocate(this%grid%yblk) - IF (allocated(this%grid%xloc)) deallocate(this%grid%xloc) - IF (allocated(this%grid%yloc)) deallocate(this%grid%yloc) - allocate (this%grid%xblk (size(fgrid%xblk))) - allocate (this%grid%yblk (size(fgrid%yblk))) - allocate (this%grid%xloc (size(fgrid%xloc))) - allocate (this%grid%yloc (size(fgrid%yloc))) - - this%grid%xblk = fgrid%xblk - this%grid%yblk = fgrid%yblk - this%grid%xloc = fgrid%xloc - this%grid%yloc = fgrid%yloc - - this%npset = pixelset%nset - - IF (p_is_worker) THEN - - allocate (afrac (pixelset%nset)) - allocate (gfrom (pixelset%nset)) - - allocate (ys (pixel%nlat)) - allocate (yn (pixel%nlat)) - allocate (xw (pixel%nlon)) - allocate (xe (pixel%nlon)) - - DO ilat = 1, pixel%nlat - ys(ilat) = find_nearest_south (pixel%lat_s(ilat), fgrid%nlat, fgrid%lat_s) - yn(ilat) = find_nearest_north (pixel%lat_n(ilat), fgrid%nlat, fgrid%lat_n) - ENDDO - - DO ilon = 1, pixel%nlon - xw(ilon) = find_nearest_west (pixel%lon_w(ilon), 
fgrid%nlon, fgrid%lon_w) - xe(ilon) = find_nearest_east (pixel%lon_e(ilon), fgrid%nlon, fgrid%lon_e) - ENDDO - - allocate (list_lat (fgrid%nlat)) - DO iy = 1, fgrid%nlat - allocate (list_lat(iy)%val (100)) - ENDDO - - allocate (ng_lat (fgrid%nlat)) - ng_lat(:) = 0 - - DO iset = 1, pixelset%nset - - ie = pixelset%ielm(iset) - npxl = pixelset%ipxend(iset) - pixelset%ipxstt(iset) + 1 - - allocate (afrac(iset)%val (npxl)) - allocate (gfrom(iset)%ilat(npxl)) - allocate (gfrom(iset)%ilon(npxl)) - - gfrom(iset)%ng = 0 - - DO ipxl = pixelset%ipxstt(iset), pixelset%ipxend(iset) - - ilat = mesh(ie)%ilat(ipxl) - ilon = mesh(ie)%ilon(ipxl) - - DO iy = ys(ilat), yn(ilat), fgrid%yinc - - lat_s = max(fgrid%lat_s(iy), pixel%lat_s(ilat)) - lat_n = min(fgrid%lat_n(iy), pixel%lat_n(ilat)) - - IF ((lat_n-lat_s) < 1.0e-6_r8) THEN - CYCLE - ENDIF - - ix = xw(ilon) - DO WHILE (.true.) - - IF (ix == xw(ilon)) THEN - lon_w = pixel%lon_w(ilon) - ELSE - lon_w = fgrid%lon_w(ix) - ENDIF - - IF (ix == xe(ilon)) THEN - lon_e = pixel%lon_e(ilon) - ELSE - lon_e = fgrid%lon_e(ix) - ENDIF - - IF (lon_e > lon_w) THEN - IF ((lon_e-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ELSE - IF ((lon_e+360.0_r8-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ENDIF - - area = areaquad (lat_s, lat_n, lon_w, lon_e) - - CALL insert_into_sorted_list2 ( ix, iy, & - gfrom(iset)%ng, gfrom(iset)%ilon, gfrom(iset)%ilat, & - iloc, is_new) - - IF (is_new) THEN - IF (iloc < gfrom(iset)%ng) THEN - afrac(iset)%val(iloc+1:gfrom(iset)%ng) & - = afrac(iset)%val(iloc:gfrom(iset)%ng-1) - ENDIF - - afrac(iset)%val(iloc) = area - ELSE - afrac(iset)%val(iloc) = afrac(iset)%val(iloc) + area - ENDIF - - IF (gfrom(iset)%ng == size(gfrom(iset)%ilat)) THEN - CALL expand_list (gfrom(iset)%ilat, 0.2_r8) - CALL expand_list (gfrom(iset)%ilon, 0.2_r8) - CALL expand_list (afrac(iset)%val, 0.2_r8) - ENDIF - - CALL insert_into_sorted_list1 ( & - ix, ng_lat(iy), list_lat(iy)%val, iloc) - - IF (ng_lat(iy) == size(list_lat(iy)%val)) THEN - CALL expand_list (list_lat(iy)%val, 0.2_r8) - ENDIF - - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - ENDDO - ENDDO - - ENDDO - ENDDO - - deallocate (ys) - deallocate (yn) - deallocate (xw) - deallocate (xe) - - ng_all = sum(ng_lat) - allocate (xlist(ng_all)) - allocate (ylist(ng_all)) - - ig = 0 - DO iy = 1, fgrid%nlat - DO ix = 1, ng_lat(iy) - ig = ig + 1 - xlist(ig) = list_lat(iy)%val(ix) - ylist(ig) = iy - ENDDO - ENDDO - - deallocate (ng_lat) - DO iy = 1, fgrid%nlat - deallocate (list_lat(iy)%val) - ENDDO - deallocate (list_lat) - -#ifdef USEMPI - allocate (ipt (ng_all)) - allocate (msk (ng_all)) - DO ig = 1, ng_all - xblk = fgrid%xblk(xlist(ig)) - yblk = fgrid%yblk(ylist(ig)) - ipt(ig) = gblock%pio(xblk,yblk) - ENDDO -#endif - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 -#ifdef USEMPI - msk = (ipt == p_address_io(iproc)) - ng = count(msk) -#else - ng = ng_all -#endif - - allocate (this%glist(iproc)%ilat (ng)) - allocate (this%glist(iproc)%ilon (ng)) - - this%glist(iproc)%ng = 0 - ENDDO - - DO ig = 1, ng_all -#ifdef USEMPI - iproc = p_itis_io(ipt(ig)) -#else - iproc = 0 -#endif - this%glist(iproc)%ng = this%glist(iproc)%ng + 1 - - ng = this%glist(iproc)%ng - this%glist(iproc)%ilon(ng) = xlist(ig) - this%glist(iproc)%ilat(ng) = ylist(ig) - ENDDO - - deallocate (xlist) - deallocate (ylist) - -#ifdef USEMPI - deallocate (ipt) - deallocate (msk) 
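! Note on the weights accumulated above: afrac(iset)%val holds the overlap area between each
! pixel of the set and grid cell (ix,iy), returned by areaquad(lat_s, lat_n, lon_w, lon_e).
! For a latitude-longitude quad on the sphere this is expected to be roughly
!     area ~ R**2 * (lon_e - lon_w) * (sin(lat_n) - sin(lat_s)),   angles in radians,
! although the exact constant areaquad uses is not essential here: the weights are normalized
! further below (gweight = afrac / sum(afrac), or 1/ng when the total area is negligible), so
! any common factor such as the Earth radius cancels and only the area fractions matter.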
-#endif - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - idest = p_address_io(iproc) - smesg = (/p_iam_glb, this%glist(iproc)%ng/) - - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iproc)%ng > 0) THEN - CALL mpi_send (this%glist(iproc)%ilon, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iproc)%ilat, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO - ENDIF - - IF (p_is_io) THEN - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_worker-1)) - - DO iworker = 0, p_np_worker-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_worker(isrc) - - this%glist(iproc)%ng = nrecv - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - - ENDDO - - ENDIF - - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (present(missing_value)) THEN - - IF (p_is_io) THEN - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (msk (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - msk(ig) = gfilter%blk(xblk,yblk)%val(xloc,yloc) /= missing_value - ENDDO - - IF (any(.not. 
msk)) THEN - - this%glist(iproc)%ng = count(msk) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (xlist(this%glist(iproc)%ng)) - allocate (ylist(this%glist(iproc)%ng)) - xlist = pack(this%glist(iproc)%ilon, mask=msk) - ylist = pack(this%glist(iproc)%ilat, mask=msk) - ENDIF - - deallocate (this%glist(iproc)%ilon) - deallocate (this%glist(iproc)%ilat) - - IF (this%glist(iproc)%ng > 0) THEN - allocate (this%glist(iproc)%ilon(this%glist(iproc)%ng)) - allocate (this%glist(iproc)%ilat(this%glist(iproc)%ng)) - this%glist(iproc)%ilon = xlist - this%glist(iproc)%ilat = ylist - ENDIF - - IF (allocated(xlist)) deallocate(xlist) - IF (allocated(ylist)) deallocate(ylist) - ENDIF - - deallocate(msk) - ENDIF - ENDDO - ENDIF - -#ifdef USEMPI - IF (p_is_io) THEN - DO iworker = 0, p_np_worker-1 - - idest = p_address_worker(iworker) - smesg = (/p_iam_glb, this%glist(iworker)%ng/) - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iworker)%ng > 0) THEN - CALL mpi_send (this%glist(iworker)%ilon, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iworker)%ilat, this%glist(iworker)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO - ENDIF - - IF (p_is_worker) THEN - DO iio = 0, p_np_io-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_io(isrc) - - this%glist(iproc)%ng = nrecv - - IF (allocated(this%glist(iproc)%ilon)) deallocate(this%glist(iproc)%ilon) - IF (allocated(this%glist(iproc)%ilat)) deallocate(this%glist(iproc)%ilat) - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - ENDDO - ENDIF - - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (p_is_worker) THEN - DO iset = 1, pixelset%nset - - allocate (msk(gfrom(iset)%ng)) - - DO ig = 1, gfrom(iset)%ng - ilon = gfrom(iset)%ilon(ig) - ilat = gfrom(iset)%ilat(ig) - xblk = fgrid%xblk(ilon) - yblk = fgrid%yblk(ilat) -#ifdef USEMPI - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - msk(ig) = find_in_sorted_list2 (ilon, ilat, this%glist(iproc)%ng, & - this%glist(iproc)%ilon, this%glist(iproc)%ilat) > 0 - - ENDDO - - IF (present(pfilter)) THEN - pfilter(iset) = any(msk) - ENDIF - - ng0 = gfrom(iset)%ng - gfrom(iset)%ng = count(msk) - IF (any(msk) .and. any(.not. 
msk)) THEN - ng = gfrom(iset)%ng - gfrom(iset)%ilon(1:ng) = pack(gfrom(iset)%ilon(1:ng0), mask = msk) - gfrom(iset)%ilat(1:ng) = pack(gfrom(iset)%ilat(1:ng0), mask = msk) - afrac(iset)%val (1:ng) = pack(afrac(iset)%val (1:ng0), mask = msk) - ENDIF - - deallocate (msk) - ENDDO - ENDIF - ENDIF - - IF (p_is_worker) THEN - - IF (allocated(this%address)) deallocate(this%address) - IF (allocated(this%gweight)) deallocate(this%gweight) - allocate (this%address (pixelset%nset)) - allocate (this%gweight (pixelset%nset)) - - DO iset = 1, pixelset%nset - - ng = gfrom(iset)%ng - IF (ng > 0) THEN - allocate (this%address(iset)%val (2,ng)) - allocate (this%gweight(iset)%val (ng)) - - IF (sum(afrac(iset)%val(1:ng)) < 1.0e-12) THEN - this%gweight(iset)%val = 1.0_r8 / ng - ELSE - this%gweight(iset)%val & - = afrac(iset)%val(1:ng) / sum(afrac(iset)%val(1:ng)) - ENDIF - - DO ig = 1, gfrom(iset)%ng - ilon = gfrom(iset)%ilon(ig) - ilat = gfrom(iset)%ilat(ig) - xblk = fgrid%xblk(ilon) - yblk = fgrid%yblk(ilat) - -#ifdef USEMPI - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - this%address(iset)%val(1,ig) = iproc - this%address(iset)%val(2,ig) = find_in_sorted_list2 ( & - ilon, ilat, this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ENDDO - ENDIF - ENDDO - - DO iset = 1, pixelset%nset - deallocate (afrac(iset)%val ) - deallocate (gfrom(iset)%ilon) - deallocate (gfrom(iset)%ilat) - ENDDO - - deallocate (afrac) - deallocate (gfrom) - - ENDIF - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - END SUBROUTINE mapping_grid2pset_build - - !----------------------------------------------------- - SUBROUTINE map_g2p_aweighted_2d (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (mapping_grid2pset_type) :: this - - type(block_data_real8_2d), intent(in) :: gdata - real(r8), intent(out) :: pdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (allocated(this%gweight(iset)%val)) THEN - pdata(iset) = 0._r8 - DO ig = 1, size(this%gweight(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - pdata(iset) = pdata(iset) & - + pbuff(iproc)%val(iloc) * this%gweight(iset)%val(ig) - ENDDO - ELSE - pdata(iset) = spval - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE map_g2p_aweighted_2d - - !----------------------------------------------------- - SUBROUTINE map_g2p_aweighted_3d (this, gdata, ndim1, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (mapping_grid2pset_type) :: this - - type(block_data_real8_3d), intent(in) :: gdata - integer, intent(in) :: ndim1 - real(r8), intent(out) :: pdata(:,:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:,:) - type(pointer_real8_2d), allocatable :: pbuff(:) - - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (ndim1, this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(:,ig) = gdata%blk(xblk,yblk)%val(:,xloc,yloc) - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, ndim1 * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (ndim1, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, ndim1 * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (allocated(this%gweight(iset)%val)) THEN - pdata(:,iset) = 0._r8 - DO ig = 1, size(this%gweight(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - pdata(:,iset) = pdata(:,iset) & - + pbuff(iproc)%val(:,iloc) * this%gweight(iset)%val(ig) - ENDDO - ELSE - pdata(:,iset) = spval - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE map_g2p_aweighted_3d - - !----------------------------------------------------- - SUBROUTINE map_g2p_max_frequency_2d (this, gdata, pdata) - - USE MOD_Precision - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_SPMD_Task - USE MOD_Vars_Global, only : spval - IMPLICIT NONE - - class (mapping_grid2pset_type) :: this - - type(block_data_int32_2d), intent(in) :: gdata - integer, intent(out) :: pdata(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - integer, allocatable :: gbuff(:) - type(pointer_int32_1d), allocatable :: pbuff(:) - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gbuff(ig) = gdata%blk(xblk,yblk)%val(xloc,yloc) - - ENDDO - -#ifdef USEMPI - idest = p_address_worker(iproc) - CALL mpi_send (gbuff, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - - deallocate (gbuff) -#endif - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_io(iproc) - CALL mpi_recv (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - pbuff(0)%val = gbuff - deallocate (gbuff) -#endif - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (allocated(this%gweight(iset)%val)) THEN - ig = maxloc(this%gweight(iset)%val, dim=1) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - pdata(iset) = pbuff(iproc)%val(iloc) - ELSE - pdata(iset) = -9999 - ENDIF - ENDDO - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - - deallocate (pbuff) - - ENDIF - - END SUBROUTINE map_g2p_max_frequency_2d - - !----------------------------------------------------- - SUBROUTINE mapping_grid2pset_free_mem (this) - - USE MOD_SPMD_Task - IMPLICIT NONE - - type(mapping_grid2pset_type) :: this - - ! Local variables - integer :: iproc, iset - - IF (allocated (this%grid%xblk)) deallocate (this%grid%xblk) - IF (allocated (this%grid%yblk)) deallocate (this%grid%yblk) - - IF (allocated (this%grid%xloc)) deallocate (this%grid%xloc) - IF (allocated (this%grid%yloc)) deallocate (this%grid%yloc) - - IF (allocated(this%glist)) THEN - DO iproc = lbound(this%glist,1), ubound(this%glist,1) - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - - IF (allocated(this%address)) THEN - DO iset = 1, this%npset - IF (allocated(this%address(iset)%val)) THEN - deallocate (this%address(iset)%val) - ENDIF - ENDDO - - deallocate (this%address) - ENDIF - - IF (allocated(this%gweight)) THEN - DO iset = 1, this%npset - IF (allocated(this%gweight(iset)%val)) THEN - deallocate (this%gweight(iset)%val) - ENDIF - ENDDO - - deallocate (this%gweight) - ENDIF - - END SUBROUTINE mapping_grid2pset_free_mem - -END MODULE MOD_Mapping_Grid2Pset diff --git a/share/MOD_Mapping_Pset2Grid.F90 b/share/MOD_Mapping_Pset2Grid.F90 deleted file mode 100644 index e3279614..00000000 --- a/share/MOD_Mapping_Pset2Grid.F90 +++ /dev/null @@ -1,1079 +0,0 @@ -#include - -MODULE MOD_Mapping_Pset2Grid - -!---------------------------------------------------------------------------- -! DESCRIPTION: -! -! Mapping data types and subroutines from vector data defined on pixelsets -! to gridded data. -! -! Notice that: -! 1. A mapping can be built with method mapping%build. -! 2. 
Overloaded method "map" can map 1D, 2D or 3D vector data to gridded data -! by using area weighted scheme. -! 3. Method "map_split" can split data in a vector according to pixelset type -! and map data to 3D gridded data. -! The dimensions are from [vector] to [type,lon,lat]. -! -! Created by Shupeng Zhang, May 2023 -!---------------------------------------------------------------------------- - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - IMPLICIT NONE - - ! ------ - type :: mapping_pset2grid_type - - type(grid_type) :: grid - integer :: npset - - type(grid_list_type), allocatable :: glist (:) - - type(pointer_int32_2d), allocatable :: address(:) - type(pointer_real8_1d), allocatable :: olparea(:) ! overlapping area - - CONTAINS - - procedure, PUBLIC :: build => mapping_pset2grid_build - - procedure, PRIVATE :: map_2d => map_p2g_2d - procedure, PRIVATE :: map_3d => map_p2g_3d - procedure, PRIVATE :: map_4d => map_p2g_4d - generic, PUBLIC :: map => map_2d, map_3d, map_4d - - procedure, PUBLIC :: map_split => map_p2g_split_to_3d - - final :: mapping_pset2grid_free_mem - - END type mapping_pset2grid_type - -!----------------------- -CONTAINS - - !------------------------------------------ - SUBROUTINE mapping_pset2grid_build (this, pixelset, fgrid) - - USE MOD_Precision - USE MOD_Namelist - USE MOD_Block - USE MOD_Pixel - USE MOD_Grid - USE MOD_Pixelset - USE MOD_DataType - USE MOD_Mesh - USE MOD_Utils - USE MOD_SPMD_Task - IMPLICIT NONE - - class (mapping_pset2grid_type) :: this - - type(pixelset_type), intent(in) :: pixelset - type(grid_type), intent(in) :: fgrid - - ! Local variables - type(pointer_real8_1d), allocatable :: afrac(:) - type(grid_list_type), allocatable :: gfrom(:) - type(pointer_int32_1d), allocatable :: list_lat(:) - integer, allocatable :: ng_lat(:) - integer, allocatable :: ys(:), yn(:), xw(:), xe(:) - integer, allocatable :: xlist(:), ylist(:) - integer, allocatable :: ipt(:) - logical, allocatable :: msk(:) - - integer :: ie, iset - integer :: ng, ig, ng_all, iloc - integer :: npxl, ipxl, ilat, ilon - integer :: iworker, iproc, idest, isrc, nrecv - integer :: rmesg(2), smesg(2) - integer :: iy, ix, xblk, yblk, xloc, yloc - real(r8) :: lat_s, lat_n, lon_w, lon_e, area - logical :: is_new - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - IF (p_is_master) THEN - write(*,"('Making mapping from pixel set to grid: ', I7, A, I7, A)") & - fgrid%nlat, ' grids in latitude', fgrid%nlon, ' grids in longitude' - ENDIF - - IF (allocated(this%grid%xblk)) deallocate(this%grid%xblk) - IF (allocated(this%grid%yblk)) deallocate(this%grid%yblk) - IF (allocated(this%grid%xloc)) deallocate(this%grid%xloc) - IF (allocated(this%grid%yloc)) deallocate(this%grid%yloc) - allocate (this%grid%xblk (size(fgrid%xblk))) - allocate (this%grid%yblk (size(fgrid%yblk))) - allocate (this%grid%xloc (size(fgrid%xloc))) - allocate (this%grid%yloc (size(fgrid%yloc))) - - this%grid%xblk = fgrid%xblk - this%grid%yblk = fgrid%yblk - this%grid%xloc = fgrid%xloc - this%grid%yloc = fgrid%yloc - - this%npset = pixelset%nset - - IF (p_is_worker) THEN - - allocate (afrac (pixelset%nset)) - allocate (gfrom (pixelset%nset)) - - allocate (ys (pixel%nlat)) - allocate (yn (pixel%nlat)) - allocate (xw (pixel%nlon)) - allocate (xe (pixel%nlon)) - - DO ilat = 1, pixel%nlat - ys(ilat) = find_nearest_south (pixel%lat_s(ilat), fgrid%nlat, fgrid%lat_s) - yn(ilat) = find_nearest_north (pixel%lat_n(ilat), fgrid%nlat, fgrid%lat_n) - ENDDO - - DO ilon = 1, pixel%nlon - xw(ilon) = find_nearest_west 
(pixel%lon_w(ilon), fgrid%nlon, fgrid%lon_w) - xe(ilon) = find_nearest_east (pixel%lon_e(ilon), fgrid%nlon, fgrid%lon_e) - ENDDO - - allocate (list_lat (fgrid%nlat)) - DO iy = 1, fgrid%nlat - allocate (list_lat(iy)%val (100)) - ENDDO - - allocate (ng_lat (fgrid%nlat)) - ng_lat(:) = 0 - - DO iset = 1, pixelset%nset - - ie = pixelset%ielm(iset) - npxl = pixelset%ipxend(iset) - pixelset%ipxstt(iset) + 1 - - allocate (afrac(iset)%val (npxl)) - allocate (gfrom(iset)%ilat(npxl)) - allocate (gfrom(iset)%ilon(npxl)) - - gfrom(iset)%ng = 0 - DO ipxl = pixelset%ipxstt(iset), pixelset%ipxend(iset) - - ilat = mesh(ie)%ilat(ipxl) - ilon = mesh(ie)%ilon(ipxl) - - DO iy = ys(ilat), yn(ilat), fgrid%yinc - - lat_s = max(fgrid%lat_s(iy), pixel%lat_s(ilat)) - lat_n = min(fgrid%lat_n(iy), pixel%lat_n(ilat)) - - IF ((lat_n-lat_s) < 1.0e-6_r8) THEN - CYCLE - ENDIF - - ix = xw(ilon) - DO WHILE (.true.) - - IF (ix == xw(ilon)) THEN - lon_w = pixel%lon_w(ilon) - ELSE - lon_w = fgrid%lon_w(ix) - ENDIF - - IF (ix == xe(ilon)) THEN - lon_e = pixel%lon_e(ilon) - ELSE - lon_e = fgrid%lon_e(ix) - ENDIF - - IF (lon_e > lon_w) THEN - IF ((lon_e-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ELSE - IF ((lon_e+360.0_r8-lon_w) < 1.0e-6_r8) THEN - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - CYCLE - ENDIF - ENDIF - - area = areaquad (lat_s, lat_n, lon_w, lon_e) - - CALL insert_into_sorted_list2 ( ix, iy, & - gfrom(iset)%ng, gfrom(iset)%ilon, gfrom(iset)%ilat, & - iloc, is_new) - - IF (is_new) THEN - IF (iloc < gfrom(iset)%ng) THEN - afrac(iset)%val(iloc+1:gfrom(iset)%ng) & - = afrac(iset)%val(iloc:gfrom(iset)%ng-1) - ENDIF - - afrac(iset)%val(iloc) = area - ELSE - afrac(iset)%val(iloc) = afrac(iset)%val(iloc) + area - ENDIF - - IF (gfrom(iset)%ng == size(gfrom(iset)%ilat)) THEN - CALL expand_list (gfrom(iset)%ilat, 0.2_r8) - CALL expand_list (gfrom(iset)%ilon, 0.2_r8) - CALL expand_list (afrac(iset)%val, 0.2_r8) - ENDIF - - CALL insert_into_sorted_list1 ( & - ix, ng_lat(iy), list_lat(iy)%val, iloc) - - IF (ng_lat(iy) == size(list_lat(iy)%val)) THEN - CALL expand_list (list_lat(iy)%val, 0.2_r8) - ENDIF - - IF (ix == xe(ilon)) EXIT - ix = mod(ix,fgrid%nlon) + 1 - ENDDO - ENDDO - - ENDDO - ENDDO - - deallocate (ys) - deallocate (yn) - deallocate (xw) - deallocate (xe) - - ng_all = sum(ng_lat) - allocate (xlist(ng_all)) - allocate (ylist(ng_all)) - - ig = 0 - DO iy = 1, fgrid%nlat - IF (ng_lat(iy) > 0) THEN - DO ix = 1, ng_lat(iy) - ig = ig + 1 - xlist(ig) = list_lat(iy)%val(ix) - ylist(ig) = iy - ENDDO - ENDIF - ENDDO - - deallocate (ng_lat) - DO iy = 1, fgrid%nlat - deallocate (list_lat(iy)%val) - ENDDO - deallocate (list_lat) - -#ifdef USEMPI - allocate (ipt (ng_all)) - allocate (msk (ng_all)) - DO ig = 1, ng_all - xblk = fgrid%xblk(xlist(ig)) - yblk = fgrid%yblk(ylist(ig)) - ipt(ig) = gblock%pio(xblk,yblk) - ENDDO -#endif - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 -#ifdef USEMPI - msk = (ipt == p_address_io(iproc)) - ng = count(msk) -#else - ng = ng_all -#endif - - allocate (this%glist(iproc)%ilat (ng)) - allocate (this%glist(iproc)%ilon (ng)) - - this%glist(iproc)%ng = 0 - ENDDO - - DO ig = 1, ng_all -#ifdef USEMPI - iproc = p_itis_io(ipt(ig)) -#else - iproc = 0 -#endif - - this%glist(iproc)%ng = this%glist(iproc)%ng + 1 - - ng = this%glist(iproc)%ng - this%glist(iproc)%ilon(ng) = xlist(ig) - this%glist(iproc)%ilat(ng) = ylist(ig) - ENDDO - -#ifdef USEMPI - deallocate (ipt) - deallocate 
(msk) -#endif - - IF (allocated(this%address)) deallocate(this%address) - IF (allocated(this%olparea)) deallocate(this%olparea) - allocate (this%address (pixelset%nset)) - allocate (this%olparea (pixelset%nset)) - - DO iset = 1, pixelset%nset - ng = gfrom(iset)%ng - allocate (this%address(iset)%val (2,ng)) - allocate (this%olparea(iset)%val (ng)) - - this%olparea(iset)%val = afrac(iset)%val(1:ng) - - IF (pixelset%has_shared) THEN - this%olparea(iset)%val = this%olparea(iset)%val * pixelset%pctshared(iset) - ENDIF - - DO ig = 1, gfrom(iset)%ng - ilon = gfrom(iset)%ilon(ig) - ilat = gfrom(iset)%ilat(ig) - xblk = fgrid%xblk(ilon) - yblk = fgrid%yblk(ilat) - -#ifdef USEMPI - iproc = p_itis_io(gblock%pio(xblk,yblk)) -#else - iproc = 0 -#endif - - this%address(iset)%val(1,ig) = iproc - this%address(iset)%val(2,ig) = find_in_sorted_list2 ( & - ilon, ilat, this%glist(iproc)%ng, this%glist(iproc)%ilon, this%glist(iproc)%ilat) - ENDDO - ENDDO - - deallocate (xlist) - deallocate (ylist) - - DO iset = 1, pixelset%nset - deallocate (afrac(iset)%val ) - deallocate (gfrom(iset)%ilon) - deallocate (gfrom(iset)%ilat) - ENDDO - - deallocate (afrac) - deallocate (gfrom) - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - idest = p_address_io(iproc) - smesg = (/p_iam_glb, this%glist(iproc)%ng/) - - CALL mpi_send (smesg, 2, MPI_INTEGER, & - idest, mpi_tag_mesg, p_comm_glb, p_err) - - IF (this%glist(iproc)%ng > 0) THEN - CALL mpi_send (this%glist(iproc)%ilon, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - CALL mpi_send (this%glist(iproc)%ilat, this%glist(iproc)%ng, MPI_INTEGER, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - -#ifdef USEMPI - IF (p_is_io) THEN - - IF (allocated(this%glist)) deallocate(this%glist) - allocate (this%glist (0:p_np_worker-1)) - - DO iworker = 0, p_np_worker-1 - - CALL mpi_recv (rmesg, 2, MPI_INTEGER, & - MPI_ANY_SOURCE, mpi_tag_mesg, p_comm_glb, p_stat, p_err) - - isrc = rmesg(1) - nrecv = rmesg(2) - iproc = p_itis_worker(isrc) - - this%glist(iproc)%ng = nrecv - - IF (nrecv > 0) THEN - allocate (this%glist(iproc)%ilon (nrecv)) - allocate (this%glist(iproc)%ilat (nrecv)) - - CALL mpi_recv (this%glist(iproc)%ilon, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - CALL mpi_recv (this%glist(iproc)%ilat, nrecv, MPI_INTEGER, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) - ENDIF - ENDDO - ENDIF -#endif - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - - END SUBROUTINE mapping_pset2grid_build - - - !----------------------------------------------------- - SUBROUTINE map_p2g_2d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (mapping_pset2grid_type) :: this - - real(r8), intent(in) :: pdata(:) - type(block_data_real8_2d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, xblk, yblk, xloc, yloc, iloc, iset - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:) = spv - ELSE - pbuff(iproc)%val(:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - - IF (present(spv)) THEN - IF (pdata(iset) == spv) CYCLE - ENDIF - - IF (present(msk)) THEN - IF (.not. msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%olparea(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (present(spv)) THEN - IF (pbuff(iproc)%val(iloc) /= spv) THEN - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%olparea(iset)%val(ig) - ELSE - pbuff(iproc)%val(iloc) = & - pdata(iset) * this%olparea(iset)%val(ig) - ENDIF - ELSE - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%olparea(iset)%val(ig) - ENDIF - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - IF (present(spv)) THEN - IF (gbuff(ig) /= spv) THEN - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - IF (gdata%blk(xblk,yblk)%val(xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(xloc,yloc) = & - gdata%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ELSE - gdata%blk(xblk,yblk)%val(xloc,yloc) = gbuff(ig) - ENDIF - ENDIF - ELSE - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - gdata%blk(xblk,yblk)%val(xloc,yloc) = & - gdata%blk(xblk,yblk)%val(xloc,yloc) + gbuff(ig) - ENDIF - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - - END SUBROUTINE map_p2g_2d - - !----------------------------------------------------- - SUBROUTINE map_p2g_3d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (mapping_pset2grid_type) :: this - - real(r8), intent(in) :: pdata(:,:) - type(block_data_real8_3d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset - integer :: xblk, yblk, xloc, yloc - integer :: lb1, ub1, i1 - - real(r8), allocatable :: gbuff(:,:) - type(pointer_real8_2d), allocatable :: pbuff(:) - - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - lb1 = lbound(pdata,1) - ub1 = ubound(pdata,1) - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (lb1:ub1, this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:,:) = spv - ELSE - pbuff(iproc)%val(:,:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (present(msk)) THEN - IF (.not. msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%olparea(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - DO i1 = lb1, ub1 - IF (present(spv)) THEN - IF (pdata(i1,iset) /= spv) THEN - IF (pbuff(iproc)%val(i1,iloc) /= spv) THEN - pbuff(iproc)%val(i1,iloc) = pbuff(iproc)%val(i1,iloc) & - + pdata(i1,iset) * this%olparea(iset)%val(ig) - ELSE - pbuff(iproc)%val(i1,iloc) = & - pdata(i1,iset) * this%olparea(iset)%val(ig) - ENDIF - ENDIF - ELSE - pbuff(iproc)%val(i1,iloc) = pbuff(iproc)%val(i1,iloc) & - + pdata(i1,iset) * this%olparea(iset)%val(ig) - ENDIF - ENDDO - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, & - (ub1-lb1+1) * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - lb1 = gdata%lb1 - ub1 = gdata%ub1 - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (lb1:ub1, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, & - (ub1-lb1+1) * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - DO i1 = lb1, ub1 - IF (present(spv)) THEN - IF (gbuff(i1,ig) /= spv) THEN - IF (gdata%blk(xblk,yblk)%val(i1,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) + gbuff(i1,ig) - ELSE - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = gbuff(i1,ig) - ENDIF - ENDIF - ELSE - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,xloc,yloc) + gbuff(i1,ig) - ENDIF - ENDDO - ENDDO - - deallocate (gbuff) - ENDIF - - ENDDO - - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE map_p2g_3d - - !----------------------------------------------------- - SUBROUTINE map_p2g_4d (this, pdata, gdata, spv, msk) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task - IMPLICIT NONE - - class (mapping_pset2grid_type) :: this - - real(r8), intent(in) :: pdata(:,:,:) - type(block_data_real8_4d), intent(inout) :: gdata - - real(r8), intent(in), optional :: spv - logical, intent(in), optional :: msk(:) - - ! 
Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset - integer :: xblk, yblk, xloc, yloc - integer :: lb1, ub1, i1, ndim1, lb2, ub2, i2, ndim2 - - real(r8), allocatable :: gbuff(:,:,:) - type(pointer_real8_3d), allocatable :: pbuff(:) - - IF (p_is_worker) THEN - - allocate (pbuff (0:p_np_io-1)) - - lb1 = lbound(pdata,1) - ub1 = ubound(pdata,1) - ndim1 = ub1 - lb1 + 1 - - lb2 = lbound(pdata,2) - ub2 = ubound(pdata,2) - ndim2 = ub2 - lb2 + 1 - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (lb1:ub1, lb2:ub2, this%glist(iproc)%ng)) - - IF (present(spv)) THEN - pbuff(iproc)%val(:,:,:) = spv - ELSE - pbuff(iproc)%val(:,:,:) = 0.0 - ENDIF - ENDIF - ENDDO - - DO iset = 1, this%npset - IF (present(msk)) THEN - IF (.not. msk(iset)) CYCLE - ENDIF - - DO ig = 1, size(this%olparea(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - DO i1 = lb1, ub1 - DO i2 = lb2, ub2 - IF (present(spv)) THEN - IF (pdata(i1,i2,iset) /= spv) THEN - IF (pbuff(iproc)%val(i1,i2,iloc) /= spv) THEN - pbuff(iproc)%val(i1,i2,iloc) = pbuff(iproc)%val(i1,i2,iloc) & - + pdata(i1,i2,iset) * this%olparea(iset)%val(ig) - ELSE - pbuff(iproc)%val(i1,i2,iloc) = & - pdata(i1,i2,iset) * this%olparea(iset)%val(ig) - ENDIF - ENDIF - ELSE - pbuff(iproc)%val(i1,i2,iloc) = pbuff(iproc)%val(i1,i2,iloc) & - + pdata(i1,i2,iset) * this%olparea(iset)%val(ig) - ENDIF - ENDDO - ENDDO - ENDDO - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, ndim1 * ndim2 * this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - lb1 = gdata%lb1 - ub1 = gdata%ub1 - ndim1 = ub1 - lb1 + 1 - - lb2 = gdata%lb2 - ub2 = gdata%ub2 - ndim2 = ub2 - lb2 + 1 - - IF (present(spv)) THEN - CALL flush_block_data (gdata, spv) - ELSE - CALL flush_block_data (gdata, 0.0_r8) - ENDIF - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (lb1:ub1, lb2:ub2, this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, ndim1 * ndim2 * this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - DO i1 = lb1, ub1 - DO i2 = lb2, ub2 - IF (present(spv)) THEN - IF (gbuff(i1,i2,ig) /= spv) THEN - IF (gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) + gbuff(i1,i2,ig) - ELSE - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = gbuff(i1,i2,ig) - ENDIF - ENDIF - ELSE - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(i1,i2,xloc,yloc) + gbuff(i1,i2,ig) - ENDIF - ENDDO - ENDDO - ENDDO - - deallocate (gbuff) - ENDIF - ENDDO - ENDIF - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE map_p2g_4d - - !----------------------------------------------------- - SUBROUTINE map_p2g_split_to_3d (this, pdata, settyp, typidx, gdata, spv) - - USE MOD_Precision - USE MOD_Grid - USE MOD_DataType - USE MOD_SPMD_Task 
- IMPLICIT NONE - - class (mapping_pset2grid_type) :: this - - real(r8), intent(in) :: pdata (:) - integer , intent(in) :: settyp(:) - integer , intent(in) :: typidx(:) - type(block_data_real8_3d), intent(inout) :: gdata - - real(r8), intent(in) :: spv - - ! Local variables - integer :: iproc, idest, isrc - integer :: ig, ilon, ilat, iloc, iset, ityp, ntyps - integer :: xblk, yblk, xloc, yloc - - real(r8), allocatable :: gbuff(:) - type(pointer_real8_1d), allocatable :: pbuff (:) - - IF (p_is_worker) THEN - allocate (pbuff (0:p_np_io-1)) - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - allocate (pbuff(iproc)%val (this%glist(iproc)%ng)) - ENDIF - ENDDO - ENDIF - - IF (p_is_io) THEN - CALL flush_block_data (gdata, spv) - ENDIF - - ntyps = size(typidx) - - DO ityp = 1, ntyps - - IF (p_is_worker) THEN - - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - pbuff(iproc)%val(:) = spv - ENDIF - ENDDO - - DO iset = 1, this%npset - IF ((settyp(iset) == typidx(ityp)) .and. (pdata(iset) /= spv)) THEN - DO ig = 1, size(this%olparea(iset)%val) - iproc = this%address(iset)%val(1,ig) - iloc = this%address(iset)%val(2,ig) - - IF (pbuff(iproc)%val(iloc) /= spv) THEN - pbuff(iproc)%val(iloc) = pbuff(iproc)%val(iloc) & - + pdata(iset) * this%olparea(iset)%val(ig) - ELSE - pbuff(iproc)%val(iloc) = & - pdata(iset) * this%olparea(iset)%val(ig) - ENDIF - ENDDO - ENDIF - ENDDO - -#ifdef USEMPI - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - idest = p_address_io(iproc) - CALL mpi_send (pbuff(iproc)%val, this%glist(iproc)%ng, MPI_DOUBLE, & - idest, mpi_tag_data, p_comm_glb, p_err) - ENDIF - ENDDO -#endif - - ENDIF - - IF (p_is_io) THEN - - DO iproc = 0, p_np_worker-1 - IF (this%glist(iproc)%ng > 0) THEN - - allocate (gbuff (this%glist(iproc)%ng)) - -#ifdef USEMPI - isrc = p_address_worker(iproc) - CALL mpi_recv (gbuff, this%glist(iproc)%ng, MPI_DOUBLE, & - isrc, mpi_tag_data, p_comm_glb, p_stat, p_err) -#else - gbuff = pbuff(0)%val -#endif - - DO ig = 1, this%glist(iproc)%ng - IF (gbuff(ig) /= spv) THEN - ilon = this%glist(iproc)%ilon(ig) - ilat = this%glist(iproc)%ilat(ig) - xblk = this%grid%xblk (ilon) - yblk = this%grid%yblk (ilat) - xloc = this%grid%xloc (ilon) - yloc = this%grid%yloc (ilat) - - IF (gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) /= spv) THEN - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) = & - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) + gbuff(ig) - ELSE - gdata%blk(xblk,yblk)%val(ityp,xloc,yloc) = gbuff(ig) - ENDIF - ENDIF - ENDDO - - deallocate (gbuff) - ENDIF - - ENDDO - - ENDIF - -#ifdef USEMPI - CALL mpi_barrier (p_comm_glb, p_err) -#endif - ENDDO - - IF (p_is_worker) THEN - DO iproc = 0, p_np_io-1 - IF (this%glist(iproc)%ng > 0) THEN - deallocate (pbuff(iproc)%val) - ENDIF - ENDDO - deallocate (pbuff) - ENDIF - - END SUBROUTINE map_p2g_split_to_3d - - !----------------------------------------------------- - SUBROUTINE mapping_pset2grid_free_mem (this) - - USE MOD_SPMD_Task - IMPLICIT NONE - - type (mapping_pset2grid_type) :: this - - ! 
Local variables - integer :: iproc, iset - - IF (allocated (this%grid%xblk)) deallocate (this%grid%xblk) - IF (allocated (this%grid%yblk)) deallocate (this%grid%yblk) - - IF (allocated (this%grid%xloc)) deallocate (this%grid%xloc) - IF (allocated (this%grid%yloc)) deallocate (this%grid%yloc) - - IF (p_is_io) THEN - IF (allocated(this%glist)) THEN - DO iproc = 0, p_np_worker-1 - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - ENDIF - - IF (p_is_worker) THEN - IF (allocated(this%glist)) THEN - DO iproc = 0, p_np_io-1 - IF (allocated(this%glist(iproc)%ilat)) deallocate (this%glist(iproc)%ilat) - IF (allocated(this%glist(iproc)%ilon)) deallocate (this%glist(iproc)%ilon) - ENDDO - - deallocate (this%glist) - ENDIF - - IF (allocated(this%address)) THEN - DO iset = 1, this%npset - IF (allocated(this%address(iset)%val)) THEN - deallocate (this%address(iset)%val) - ENDIF - ENDDO - - deallocate (this%address) - ENDIF - - IF (allocated(this%olparea)) THEN - DO iset = 1, this%npset - IF (allocated(this%olparea(iset)%val)) THEN - deallocate (this%olparea(iset)%val) - ENDIF - ENDDO - - deallocate (this%olparea) - ENDIF - ENDIF - - END SUBROUTINE mapping_pset2grid_free_mem - -END MODULE MOD_Mapping_Pset2Grid diff --git a/share/MOD_Namelist.F90 b/share/MOD_Namelist.F90 index 6f36ca51..dc8c8c7b 100644 --- a/share/MOD_Namelist.F90 +++ b/share/MOD_Namelist.F90 @@ -378,8 +378,8 @@ MODULE MOD_Namelist !CBL height logical :: DEF_USE_CBL_HEIGHT = .false. - character(len=20) :: DEF_Forcing_Interp_Method = 'arealweight' - character(len=20) :: DEF_Hres_Grid = 'heihe_90m' + character(len=20) :: DEF_Forcing_Interp_Method = 'arealweight' ! 'arealweight' (default) or 'bilinear' + logical :: DEF_USE_Forcing_Downscaling = .false. character(len=5) :: DEF_DS_precipitation_adjust_scheme = 'II' character(len=5) :: DEF_DS_longwave_adjust_scheme = 'II' @@ -894,8 +894,6 @@ SUBROUTINE read_namelist (nlfile) DEF_Forcing_Interp_Method, & - DEF_Hres_Grid, & - DEF_USE_Forcing_Downscaling, & DEF_DS_precipitation_adjust_scheme, & DEF_DS_longwave_adjust_scheme, & @@ -1341,9 +1339,6 @@ SUBROUTINE read_namelist (nlfile) CALL mpi_bcast (DEF_HIST_CompressLevel, 1, mpi_integer, p_root, p_comm_glb, p_err) CALL mpi_bcast (DEF_Forcing_Interp_Method, 20, mpi_character, p_root, p_comm_glb, p_err) - ! added by Chen Sisi, used for reading high resolution terrain factor data - CALL mpi_bcast (DEF_Hres_Grid, 20, mpi_character, p_root, p_comm_glb, p_err) - CALL mpi_bcast (DEF_USE_Forcing_Downscaling, 1, mpi_logical, p_root, p_comm_glb, p_err) CALL mpi_bcast (DEF_DS_precipitation_adjust_scheme, 5, mpi_character, p_root, p_comm_glb, p_err) CALL mpi_bcast (DEF_DS_longwave_adjust_scheme, 5, mpi_character, p_root, p_comm_glb, p_err) diff --git a/share/MOD_NetCDFVector.F90 b/share/MOD_NetCDFVector.F90 deleted file mode 100755 index 3057b194..00000000 --- a/share/MOD_NetCDFVector.F90 +++ /dev/null @@ -1,1596 +0,0 @@ -#include - -MODULE MOD_NetCDFVector - - !---------------------------------------------------------------------------------- - ! DESCRIPTION: - ! - ! High-level Subroutines to read and write variables in files with netCDF format. - ! - ! CoLM read and write netCDF files mainly in three ways: - ! 1. Serial: read and write data by a single process; - ! 2. Vector: 1) read vector data by IO and scatter from IO to workers - ! 2) gather from workers to IO and write vectors by IO - ! 
Notice: each file contains vector data in one block. - ! 3. Block : read blocked data by IO - ! Notice: input file is a single file. - ! - ! This module contains subroutines of "2. Vector". - ! - ! Created by Shupeng Zhang, May 2023 - !---------------------------------------------------------------------------------- - - USE MOD_DataType - IMPLICIT NONE - - ! PUBLIC subroutines - - interface ncio_read_vector - MODULE procedure ncio_read_vector_logical_1d - MODULE procedure ncio_read_vector_int32_1d - MODULE procedure ncio_read_vector_int64_1d - MODULE procedure ncio_read_vector_real8_1d - MODULE procedure ncio_read_vector_real8_2d - MODULE procedure ncio_read_vector_real8_3d - MODULE procedure ncio_read_vector_real8_4d - END interface ncio_read_vector - - PUBLIC :: ncio_create_file_vector - PUBLIC :: ncio_define_dimension_vector - - interface ncio_write_vector - MODULE procedure ncio_write_vector_logical_1d - MODULE procedure ncio_write_vector_int32_1d - MODULE procedure ncio_write_vector_int32_3d - MODULE procedure ncio_write_vector_int64_1d - MODULE procedure ncio_write_vector_real8_1d - MODULE procedure ncio_write_vector_real8_2d - MODULE procedure ncio_write_vector_real8_3d - MODULE procedure ncio_write_vector_real8_4d - END interface ncio_write_vector - -CONTAINS - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_int32_1d ( & - filename, dataname, pixelset, rdata, defval) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - TYPE(pixelset_type), intent(in) :: pixelset - - INTEGER, allocatable, intent(inout) :: rdata (:) - INTEGER, intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER, allocatable :: sbuff(:), rbuff(:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER, & - MPI_IN_PLACE, 0, MPI_INTEGER, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' 
- CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER, & ! insignificant on workers - rbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_int32_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_int64_1d ( & - filename, dataname, pixelset, rdata, defval) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - TYPE(pixelset_type), intent(in) :: pixelset - - INTEGER*8, allocatable, intent(inout) :: rdata (:) - INTEGER, intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER*8, allocatable :: sbuff(:), rbuff(:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER8, & - MPI_IN_PLACE, 0, MPI_INTEGER8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER8, & ! 
insignificant on workers - rbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER8, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_int64_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_logical_1d (filename, dataname, pixelset, rdata, & - defval) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - TYPE(pixelset_type), intent(in) :: pixelset - - LOGICAL, allocatable, intent(inout) :: rdata (:) - LOGICAL, intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER(1), allocatable :: sbuff(:), rbuff(:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - IF (defval) THEN - sbuff(:) = 1 - ELSE - sbuff(:) = 0 - ENDIF - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER1, & - MPI_IN_PLACE, 0, MPI_INTEGER1, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = (sbuff == 1) -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER1, & ! 
insignificant on workers - rbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER1, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = (rbuff == 1) - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_logical_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_real8_1d (filename, dataname, pixelset, rdata, & - defval) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - TYPE(pixelset_type), intent(in) :: pixelset - - REAL(r8), allocatable, intent(inout) :: rdata (:) - REAL(r8), intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:), rbuff(:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - MPI_IN_PLACE, 0, MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! 
insignificant on workers - rbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_real8_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_real8_2d ( & - filename, dataname, ndim1, pixelset, rdata, defval) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - INTEGER, intent(in) :: ndim1 - TYPE(pixelset_type), intent(in) :: pixelset - - REAL(r8), allocatable, intent(inout) :: rdata (:,:) - REAL(r8), intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:), rbuff(:,:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (ndim1, pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (ndim1, pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:,:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, ndim1 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - MPI_IN_PLACE, 0, MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (ndim1, pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1,1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! 
insignificant on workers - rbuff, ndim1 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_real8_2d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_real8_3d ( & - filename, dataname, ndim1, ndim2, pixelset, rdata, defval) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - INTEGER, intent(in) :: ndim1, ndim2 - TYPE(pixelset_type), intent(in) :: pixelset - - REAL(r8), allocatable, intent(inout) :: rdata (:,:,:) - REAL(r8), intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:,:), rbuff(:,:,:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (ndim1,ndim2, pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (ndim1,ndim2, pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:,:,:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, ndim1 * ndim2 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * ndim2 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - MPI_IN_PLACE, 0, MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,:,istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (ndim1,ndim2, pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1,1,1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! 
insignificant on workers - rbuff, ndim1 * ndim2 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,:,istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_real8_3d - - !--------------------------------------------------------- - SUBROUTINE ncio_read_vector_real8_4d ( & - filename, dataname, ndim1, ndim2, ndim3, pixelset, rdata, defval) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - INTEGER, intent(in) :: ndim1, ndim2, ndim3 - TYPE(pixelset_type), intent(in) :: pixelset - - REAL(r8), allocatable, intent(inout) :: rdata (:,:,:,:) - REAL(r8), intent(in), optional :: defval - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:,:,:), rbuff(:,:,:,:) - logical :: any_file_exists, this_file_exists - - IF (p_is_worker) THEN - IF ((pixelset%nset > 0) .and. (.not. allocated(rdata))) THEN - allocate (rdata (ndim1,ndim2,ndim3, pixelset%nset)) - ENDIF - ENDIF - - any_file_exists = .false. - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (sbuff (ndim1,ndim2,ndim3, pixelset%vecgs%vlen(iblk,jblk))) - CALL get_filename_block (filename, iblk, jblk, fileblock) - - inquire (file = trim(fileblock), exist = this_file_exists) - any_file_exists = any_file_exists .or. this_file_exists - - IF (ncio_var_exist(fileblock,dataname)) THEN - CALL ncio_read_serial (fileblock, dataname, sbuff) - ELSEIF (present(defval)) THEN - sbuff(:,:,:,:) = defval - ENDIF - -#ifdef USEMPI - CALL mpi_scatterv ( & - sbuff, ndim1 * ndim2 * ndim3 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * ndim2 * ndim3 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - MPI_IN_PLACE, 0, MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,:,:,istt:iend) = sbuff -#endif - - deallocate (sbuff) - - ENDDO - -#ifdef USEMPI - CALL mpi_allreduce (MPI_IN_PLACE, any_file_exists, 1, MPI_LOGICAL, MPI_LOR, p_comm_io, p_err) -#endif - IF (.not. any_file_exists) THEN - write(*,*) 'Warning : restart file ' //trim(filename)// ' not found.' - CALL CoLM_stop () - ENDIF - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (rbuff (ndim1,ndim2,ndim3, pixelset%vecgs%vlen(iblk,jblk))) - ELSE - allocate (rbuff(1,1,1,1)) - ENDIF - - CALL mpi_scatterv ( & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! 
insignificant on workers - rbuff, ndim1 * ndim2 * ndim3 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rdata(:,:,:,istt:iend) = rbuff - ENDIF - - IF (allocated(rbuff)) deallocate (rbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_read_vector_real8_4d - - !--------------------------------------------------------- - SUBROUTINE ncio_create_file_vector (filename, pixelset) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - TYPE(pixelset_type), intent(in) :: pixelset - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk - CHARACTER(len=256) :: fileblock - - IF (p_is_io) THEN - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - CALL get_filename_block (filename, iblk, jblk, fileblock) - CALL ncio_create_file (fileblock) - - ENDDO - ENDIF - - END SUBROUTINE ncio_create_file_vector - - !--------------------------------------------------------- - SUBROUTINE ncio_define_dimension_vector (filename, pixelset, dimname, dimlen) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - TYPE(pixelset_type), intent(in) :: pixelset - CHARACTER(len=*), intent(in) :: dimname - INTEGER, intent(in), optional :: dimlen - - ! Local variables - INTEGER :: iblkgrp, iblk, jblk - CHARACTER(len=256) :: fileblock - LOGICAL :: fexists - - IF (p_is_io) THEN - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - CALL get_filename_block (filename, iblk, jblk, fileblock) - inquire (file=trim(fileblock), exist=fexists) - IF (fexists) THEN - IF (present(dimlen)) THEN - CALL ncio_define_dimension (fileblock, trim(dimname), dimlen) - ELSE - CALL ncio_define_dimension (fileblock, trim(dimname), & - pixelset%vecgs%vlen(iblk,jblk)) - ENDIF - ENDIF - - ENDDO - ENDIF - - END SUBROUTINE ncio_define_dimension_vector - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_int32_1d ( & - filename, dataname, dimname, pixelset, wdata, compress_level) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dimname - TYPE(pixelset_type), intent(in) :: pixelset - INTEGER, intent(in) :: wdata (:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER, allocatable :: sbuff(:), rbuff(:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_INTEGER, & - rbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname, & - compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(istt:iend) - ELSE - allocate (sbuff (1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER, & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_int32_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_logical_1d ( & - filename, dataname, dimname, pixelset, wdata, compress_level) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dimname - TYPE(pixelset_type), intent(in) :: pixelset - LOGICAL, intent(in) :: wdata (:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend, i - CHARACTER(len=256) :: fileblock - INTEGER(1), allocatable :: sbuff(:), rbuff(:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_INTEGER1, & - rbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER1, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - do i = istt, iend - if(wdata(i))then - rbuff(i-istt+1) = 1 - else - rbuff(i-istt+1) = 0 - end if - end do -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname, & - compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - do i = istt, iend - if(wdata(i))then - sbuff(i-istt+1) = 1 - else - sbuff(i-istt+1) = 0 - end if - end do - ELSE - allocate (sbuff (1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER1, & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER1, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_logical_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_int32_3d ( & - filename, dataname, dim1name, ndim1, dim2name, ndim2, & - dim3name, pixelset, wdata, compress_level) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dim1name, dim2name, dim3name - TYPE(pixelset_type), intent(in) :: pixelset - INTEGER, intent(in) :: ndim1, ndim2 - INTEGER, intent(in) :: wdata (:,:,:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER, allocatable :: sbuff(:,:,:), rbuff(:,:,:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (ndim1,ndim2,pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_INTEGER, & - rbuff, ndim1*ndim2*pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1*ndim2*pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(:,:,istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name, compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (ndim1,ndim2, pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(:,:,istt:iend) - ELSE - allocate (sbuff (1,1,1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, ndim1*ndim2*pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER, & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_int32_3d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_int64_1d ( & - filename, dataname, dimname, pixelset, wdata, compress_level) - - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dimname - TYPE(pixelset_type), intent(in) :: pixelset - INTEGER*8, intent(in) :: wdata (:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - INTEGER*8, allocatable :: sbuff(:), rbuff(:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_INTEGER8, & - rbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_INTEGER8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname, & - compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(istt:iend) - ELSE - allocate (sbuff (1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_INTEGER8, & - MPI_INULL_P, MPI_INULL_P, MPI_INULL_P, MPI_INTEGER8, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_int64_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_real8_1d ( & - filename, dataname, dimname, pixelset, wdata, compress_level) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dimname - TYPE(pixelset_type), intent(in) :: pixelset - REAL(r8), intent(in) :: wdata (:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:), rbuff(:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv ( MPI_IN_PLACE, 0, MPI_REAL8, & - rbuff, pixelset%vecgs%vcnt(:,iblk,jblk), & - pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dimname, compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, dimname) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(istt:iend) - ELSE - allocate (sbuff (1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_real8_1d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_real8_2d ( & - filename, dataname, dim1name, ndim1, & - dim2name, pixelset, wdata, compress_level) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dim1name, dim2name - INTEGER, intent(in) :: ndim1 - TYPE(pixelset_type), intent(in) :: pixelset - REAL(r8), intent(in) :: wdata (:,:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:), rbuff(:,:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (ndim1, pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_REAL8, & - rbuff, ndim1 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(:,istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (ndim1,pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(:,istt:iend) - ELSE - allocate (sbuff (1,1)) - ENDIF - - CALL mpi_gatherv ( & - sbuff, ndim1 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_real8_2d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_real8_3d ( & - filename, dataname, dim1name, ndim1, dim2name, ndim2, & - dim3name, pixelset, wdata, compress_level) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dim1name, dim2name, dim3name - TYPE(pixelset_type), intent(in) :: pixelset - INTEGER, intent(in) :: ndim1, ndim2 - REAL(r8), intent(in) :: wdata (:,:,:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:,:), rbuff(:,:,:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (ndim1, ndim2, pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_REAL8, & - rbuff, ndim1 * ndim2 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * ndim2 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(:,:,istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name, compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (ndim1,ndim2,pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(:,:,istt:iend) - ELSE - allocate (sbuff (1,1,1)) - ENDIF - - CALL mpi_gatherv ( sbuff, & - ndim1 * ndim2 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_real8_3d - - !--------------------------------------------------------- - SUBROUTINE ncio_write_vector_real8_4d ( & - filename, dataname, dim1name, ndim1, dim2name, ndim2, dim3name, ndim3, & - dim4name, pixelset, wdata, compress_level) - - USE MOD_Precision - USE MOD_NetCDFSerial - USE MOD_SPMD_Task - USE MOD_Block - USE MOD_Pixelset - IMPLICIT NONE - - CHARACTER(len=*), intent(in) :: filename - CHARACTER(len=*), intent(in) :: dataname - CHARACTER(len=*), intent(in) :: dim1name, dim2name, dim3name, dim4name - INTEGER, intent(in) :: ndim1, ndim2, ndim3 - TYPE(pixelset_type), intent(in) :: pixelset - REAL(r8), intent(in) :: wdata (:,:,:,:) - - INTEGER, intent(in), optional :: compress_level - - ! 
Local variables - INTEGER :: iblkgrp, iblk, jblk, istt, iend - CHARACTER(len=256) :: fileblock - REAL(r8), allocatable :: sbuff(:,:,:,:), rbuff(:,:,:,:) - - IF (p_is_io) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - allocate (rbuff (ndim1, ndim2, ndim3, pixelset%vecgs%vlen(iblk,jblk))) -#ifdef USEMPI - CALL mpi_gatherv (MPI_IN_PLACE, 0, MPI_REAL8, & - rbuff, ndim1 * ndim2 * ndim3 * pixelset%vecgs%vcnt(:,iblk,jblk), & - ndim1 * ndim2 * ndim3 * pixelset%vecgs%vdsp(:,iblk,jblk), MPI_REAL8, & - p_root, p_comm_group, p_err) -#else - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - rbuff = wdata(:,:,:,istt:iend) -#endif - - CALL get_filename_block (filename, iblk, jblk, fileblock) - IF (present(compress_level)) THEN - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name, dim4name, compress = compress_level) - ELSE - CALL ncio_write_serial (fileblock, dataname, rbuff, & - dim1name, dim2name, dim3name, dim4name) - ENDIF - - deallocate (rbuff) - - ENDDO - - ENDIF - -#ifdef USEMPI - IF (p_is_worker) THEN - - DO iblkgrp = 1, pixelset%nblkgrp - iblk = pixelset%xblkgrp(iblkgrp) - jblk = pixelset%yblkgrp(iblkgrp) - - IF (pixelset%vecgs%vlen(iblk,jblk) > 0) THEN - allocate (sbuff (ndim1,ndim2,ndim3,pixelset%vecgs%vlen(iblk,jblk))) - istt = pixelset%vecgs%vstt(iblk,jblk) - iend = pixelset%vecgs%vend(iblk,jblk) - sbuff = wdata(:,:,:,istt:iend) - ELSE - allocate (sbuff (1,1,1,1)) - ENDIF - - CALL mpi_gatherv ( sbuff, & - ndim1 * ndim2 * ndim3 * pixelset%vecgs%vlen(iblk,jblk), MPI_REAL8, & - MPI_RNULL_P, MPI_INULL_P, MPI_INULL_P, MPI_REAL8, & ! insignificant on workers - p_root, p_comm_group, p_err) - - IF (allocated(sbuff)) deallocate (sbuff) - - ENDDO - - ENDIF -#endif - - END SUBROUTINE ncio_write_vector_real8_4d - -END MODULE MOD_NetCDFVector diff --git a/share/MOD_SpatialMapping.F90 b/share/MOD_SpatialMapping.F90 index 1b89a2c2..732c1031 100644 --- a/share/MOD_SpatialMapping.F90 +++ b/share/MOD_SpatialMapping.F90 @@ -2407,11 +2407,7 @@ SUBROUTINE spatial_mapping_normalize (this, gdata, sdata) xblk = gblock%xblkme(iblkme) yblk = gblock%yblkme(iblkme) - WHERE (isnan(sumdata%blk(xblk,yblk)%val)) - sumdata%blk(xblk,yblk)%val = this%missing_value - ENDWHERE - - WHERE ((sumdata%blk(xblk,yblk)%val /= this%missing_value)) + WHERE (sumdata%blk(xblk,yblk)%val /= this%missing_value) sumdata%blk(xblk,yblk)%val = gdata%blk(xblk,yblk)%val / sumdata%blk(xblk,yblk)%val ENDWHERE ENDDO
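
Note on the pattern shown above: the removed pset-to-grid mapping routines and the retained spatial_mapping_normalize follow the same area-weighted accumulate-then-normalize scheme. Each pixel-set element contributes value * overlap_area to its target grid cell, the overlap areas are accumulated separately, and the grid value is finally divided by the accumulated area wherever valid data arrived, while cells that only ever saw the special value (spv) are left untouched. The following is a minimal serial sketch of that pattern, not CoLM code: the program name, the flat cell index icell, and the small test arrays are illustrative assumptions and stand in for CoLM's block-distributed data structures and MPI exchange.

    ! Minimal serial sketch (not CoLM code) of the accumulate-then-normalize
    ! pattern used by the pset-to-grid mapping: skip spv inputs, accumulate
    ! value*area and area per target cell, then divide where data arrived.
    program pset2grid_sketch
       implicit none
       integer,  parameter :: r8 = selected_real_kind(12)
       integer,  parameter :: nset = 5, ncell = 3
       real(r8), parameter :: spv = -1.0e36_r8                  ! special (missing) value
       real(r8) :: pdata(nset)   = [1.0_r8, 2.0_r8, spv, 4.0_r8, 5.0_r8]
       integer  :: icell(nset)   = [1, 1, 2, 3, 3]              ! target cell of each element
       real(r8) :: olparea(nset) = [0.4_r8, 0.6_r8, 1.0_r8, 0.3_r8, 0.7_r8]
       real(r8) :: gdata(ncell), sumarea(ncell)
       integer  :: iset, ic

       gdata(:)   = spv
       sumarea(:) = spv

       ! accumulation (mirrors map_p2g_2d): skip missing inputs, add value*area
       do iset = 1, nset
          if (pdata(iset) == spv) cycle
          ic = icell(iset)
          if (gdata(ic) == spv) then
             gdata(ic)   = pdata(iset) * olparea(iset)
             sumarea(ic) = olparea(iset)
          else
             gdata(ic)   = gdata(ic)   + pdata(iset) * olparea(iset)
             sumarea(ic) = sumarea(ic) + olparea(iset)
          end if
       end do

       ! normalization (mirrors spatial_mapping_normalize): divide only where the
       ! accumulated area is not the missing value; cell 2 received no valid data
       ! and therefore keeps spv.
       where (sumarea /= spv) gdata = gdata / sumarea

       print *, 'area-weighted means:', gdata                   ! 1.6, spv, 4.7
    end program pset2grid_sketch

In the parallel code the same two steps are split across processes: workers fill per-IO-process buffers (pbuff), IO processes receive and add them into the blocked grid (gdata), and the normalization is applied afterwards on the accumulated overlap areas.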