66! ! aerosol, IN&CCN and surface properties updates.
77 module GFS_phys_time_vary
88
9+ use mpi_f08
910#ifdef _OPENMP
1011 use omp_lib
1112#endif
@@ -79,7 +80,7 @@ end subroutine copy_error
7980! >\section gen_GFS_phys_time_vary_init GFS_phys_time_vary_init General Algorithm
8081! > @{
8182 subroutine GFS_phys_time_vary_init ( &
82- me , master , ntoz , h2o_phys , iaerclm , iaermdl , iccn , iflip , im , levs , &
83+ mpicomm , mpirank , mpiroot , ntoz , h2o_phys , iaerclm , iaermdl , iccn , iflip , im , levs , &
8384 nx , ny , idate , xlat_d , xlon_d , &
8485 jindx1_o3 , jindx2_o3 , ddy_o3 , jindx1_h , jindx2_h , ddy_h , h2opl ,fhour , &
8586 jindx1_aer , jindx2_aer , ddy_aer , iindx1_aer , iindx2_aer , ddx_aer , aer_nm , &
@@ -100,7 +101,8 @@ subroutine GFS_phys_time_vary_init (
100101 implicit none
101102
102103 ! Interface variables
103- integer , intent (in ) :: me, master, ntoz, iccn, iflip, im, nx, ny, levs, iaermdl
104+ type (MPI_Comm), intent (in ) :: mpicomm
105+ integer , intent (in ) :: mpirank, mpiroot, ntoz, iccn, iflip, im, nx, ny, levs, iaermdl
104106 logical , intent (in ) :: h2o_phys, iaerclm, lsm_cold_start
105107 integer , intent (in ) :: idate(:), iopt_lake, iopt_lake_clm, iopt_lake_flake
106108 real (kind_phys), intent (in ) :: fhour, lakefrac_threshold, lakedepth_threshold
@@ -230,9 +232,9 @@ subroutine GFS_phys_time_vary_init (
230232 if (iaerclm) then
231233 ntrcaer = ntrcaerm
232234 if (iaermdl == 1 ) then
233- call read_aerdata (me,master ,iflip,idate,errmsg,errflg)
235+ call read_aerdata (mpicomm,mpirank,mpiroot ,iflip,idate,errmsg,errflg)
234236 elseif (iaermdl == 6 ) then
235- call read_aerdata_dl(me,master,iflip, &
237+ call read_aerdata_dl(mpicomm, mpirank, mpiroot, iflip, &
236238 idate,fhour, errmsg,errflg)
237239 end if
238240 if (errflg/= 0 ) return
@@ -251,19 +253,19 @@ subroutine GFS_phys_time_vary_init (
251253
252254! > - Call read_cidata() to read IN and CCN data
253255 if (iccn == 1 ) then
254- call read_cidata (me,master )
256+ call read_cidata (mpicomm, mpirank, mpiroot )
255257 ! No consistency check needed for in/ccn data, all values are
256258 ! hardcoded in module iccn_def.F and GFS_typedefs.F90
257259 endif
258260
 259261! > - Call read_tau_amf() to read tau_amf data for ugwp_v1
260262 if (do_ugwp_v1) then
261- call read_tau_amf(me, master , errmsg, errflg)
263+ call read_tau_amf(mpicomm, mpirank, mpiroot , errmsg, errflg)
262264 if (errflg/= 0 ) return
263265 endif
264266
265267! > - Initialize soil vegetation (needed for sncovr calculation further down)
266- call set_soilveg(me , isot, ivegsrc, nlunit, errmsg, errflg)
268+ call set_soilveg(mpirank , isot, ivegsrc, nlunit, errmsg, errflg)
267269 if (errflg/= 0 ) return
268270
269271! > - read in NoahMP table (needed for NoahMP init)
@@ -289,8 +291,7 @@ subroutine GFS_phys_time_vary_init (
289291 if (iaerclm) then
290292 call setindxaer (im, xlat_d, jindx1_aer, &
291293 jindx2_aer, ddy_aer, xlon_d, &
292- iindx1_aer, iindx2_aer, ddx_aer, &
293- me, master)
294+ iindx1_aer, iindx2_aer, ddx_aer)
294295 iamin = min (minval (iindx1_aer), iamin)
295296 iamax = max (maxval (iindx2_aer), iamax)
296297 jamin = min (minval (jindx1_aer), jamin)
@@ -306,7 +307,7 @@ subroutine GFS_phys_time_vary_init (
306307
307308! > - Call cires_indx_ugwp to read monthly-mean GW-tau diagnosed from FV3GFS-runs that can resolve GWs
308309 if (do_ugwp_v1) then
309- call cires_indx_ugwp (im, me, master , xlat_d, jindx1_tau, jindx2_tau, &
310+ call cires_indx_ugwp (im, mpirank, mpiroot , xlat_d, jindx1_tau, jindx2_tau, &
310311 ddy_j1tau, ddy_j2tau)
311312 endif
312313
@@ -322,7 +323,7 @@ subroutine GFS_phys_time_vary_init (
322323
323324 !- -- if sncovr does not exist in the restart, need to create it
324325 if (all (sncovr < zero)) then
325- if (me == master ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: compute sncovr from weasd and soil vegetation parameters'
326+ if (mpirank == mpiroot ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: compute sncovr from weasd and soil vegetation parameters'
326327 !- -- compute sncovr from existing variables
327328 !- -- code taken directly from read_fix.f
328329 sncovr(:) = zero
@@ -343,7 +344,7 @@ subroutine GFS_phys_time_vary_init (
343344 !- -- For RUC LSM: create sncovr_ice from sncovr
344345 if (lsm == lsm_ruc) then
345346 if (all (sncovr_ice < zero)) then
346- if (me == master ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: fill sncovr_ice with sncovr for RUC LSM'
347+ if (mpirank == mpiroot ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: fill sncovr_ice with sncovr for RUC LSM'
347348 sncovr_ice(:) = sncovr(:)
348349 endif
349350 endif
@@ -353,9 +354,9 @@ subroutine GFS_phys_time_vary_init (
353354 if (iaerclm) then
354355 ! This call is outside the OpenMP section, so it should access errmsg & errflg directly.
355356 if (iaermdl== 1 ) then
356- call read_aerdataf (me, master , iflip, idate, fhour, errmsg, errflg)
357+ call read_aerdataf (mpicomm, mpirank, mpiroot , iflip, idate, fhour, errmsg, errflg)
357358 elseif (iaermdl== 6 ) then
358- call read_aerdataf_dl (me, master , iflip, idate, fhour, errmsg, errflg)
359+ call read_aerdataf_dl (mpicomm, mpirank, mpiroot , iflip, idate, fhour, errmsg, errflg)
359360 end if
360361 ! If it is moved to an OpenMP section, it must use myerrmsg, myerrflg, and copy_error.
361362 if (errflg/= 0 ) return
@@ -365,7 +366,7 @@ subroutine GFS_phys_time_vary_init (
365366 !- -- land and ice - not for restart runs
366367 lsm_init: if (lsm_cold_start) then
367368 if (lsm == lsm_noahmp .or. lsm == lsm_ruc) then
368- if (me == master ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: initialize albedo for land and ice'
369+ if (mpirank == mpiroot ) write (* ,' (a)' ) ' GFS_phys_time_vary_init: initialize albedo for land and ice'
369370 do ix= 1 ,im
370371 albdvis_lnd(ix) = 0.2_kind_phys
371372 albdnir_lnd(ix) = 0.2_kind_phys
@@ -705,7 +706,7 @@ end subroutine GFS_phys_time_vary_init
705706! >\section gen_GFS_phys_time_vary_timestep_init GFS_phys_time_vary_timestep_init General Algorithm
706707! > @{
707708 subroutine GFS_phys_time_vary_timestep_init ( &
708- me , master , cnx , cny , isc , jsc , nrcm , im , levs , kdt , idate , cplflx , &
709+ mpicomm , mpirank , mpiroot , cnx , cny , isc , jsc , nrcm , im , levs , kdt , idate , cplflx , &
709710 nsswr , fhswr , lsswr , fhour , &
710711 imfdeepcnv , cal_pre , random_clds , nscyc , ntoz , h2o_phys , iaerclm , iaermdl , iccn , clstp , &
711712 jindx1_o3 , jindx2_o3 , ddy_o3 , ozpl , jindx1_h , jindx2_h , ddy_h , h2opl , iflip , &
@@ -724,7 +725,8 @@ subroutine GFS_phys_time_vary_timestep_init (
724725 implicit none
725726
726727 ! Interface variables
727- integer , intent (in ) :: me, master, cnx, cny, isc, jsc, nrcm, im, levs, kdt, &
728+ type (MPI_Comm), intent (in ) :: mpicomm
729+ integer , intent (in ) :: mpirank, mpiroot, cnx, cny, isc, jsc, nrcm, im, levs, kdt, &
728730 nsswr, imfdeepcnv, iccn, nscyc, ntoz, iflip, iaermdl
729731 integer , intent (in ) :: idate(:)
730732 real (kind_phys), intent (in ) :: fhswr, fhour
@@ -794,8 +796,8 @@ subroutine GFS_phys_time_vary_timestep_init (
794796! $OMP parallel num_threads(nthrds) default(none) &
795797! $OMP shared(kdt,nsswr,lsswr,clstp,imfdeepcnv,cal_pre,random_clds) &
796798! $OMP shared(fhswr,fhour,seed0,cnx,cny,nrcm,wrk,rannie,rndval, iaermdl) &
797- ! $OMP shared(rann,im,isc,jsc,imap,jmap,ntoz,me ,idate,jindx1_o3,jindx2_o3) &
798- ! $OMP shared(ozpl,ddy_o3,h2o_phys,jindx1_h,jindx2_h,h2opl,ddy_h,iaerclm,master) &
799+ ! $OMP shared(rann,im,isc,jsc,imap,jmap,ntoz,mpirank ,idate,jindx1_o3,jindx2_o3) &
800+ ! $OMP shared(ozpl,ddy_o3,h2o_phys,jindx1_h,jindx2_h,h2opl,ddy_h,iaerclm,mpiroot) &
799801! $OMP shared(levs,prsl,iccn,jindx1_ci,jindx2_ci,ddy_ci,iindx1_ci,iindx2_ci) &
800802! $OMP shared(ddx_ci,in_nm,ccn_nm,do_ugwp_v1,jindx1_tau,jindx2_tau,ddy_j1tau) &
801803! $OMP shared(ddy_j2tau,tau_amf,iflip,ozphys,h2ophys,rjday,n1,n2,idat,jdat,rinc) &
@@ -883,7 +885,7 @@ subroutine GFS_phys_time_vary_timestep_init (
883885! $OMP section
884886! > - Call ciinterpol() to make IN and CCN data interpolation
885887 if (iccn == 1 ) then
886- call ciinterpol (me , im, idate, fhour, &
888+ call ciinterpol (mpirank , im, idate, fhour,&
887889 jindx1_ci, jindx2_ci, &
888890 ddy_ci, iindx1_ci, &
889891 iindx2_ci, ddx_ci, &
@@ -893,7 +895,7 @@ subroutine GFS_phys_time_vary_timestep_init (
893895! $OMP section
 894896! > - Call cires_indx_ugwp to read monthly-mean GW-tau diagnosed from FV3GFS-runs that resolve GW-activity
895897 if (do_ugwp_v1) then
896- call tau_amf_interp(me, master , im, idate, fhour, &
898+ call tau_amf_interp(mpirank, mpiroot , im, idate, fhour, &
897899 jindx1_tau, jindx2_tau, &
898900 ddy_j1tau, ddy_j2tau, tau_amf)
899901 endif
@@ -906,13 +908,13 @@ subroutine GFS_phys_time_vary_timestep_init (
906908 ! aerinterpol is using threading inside, don't
907909 ! move into OpenMP parallel section above
908910 if (iaermdl== 1 ) then
909- call aerinterpol (me, master , nthrds, im, idate, &
911+ call aerinterpol (mpicomm, mpirank, mpiroot , nthrds, im, idate, &
910912 fhour, iflip, jindx1_aer, jindx2_aer, &
911913 ddy_aer, iindx1_aer, &
912914 iindx2_aer, ddx_aer, &
913915 levs, prsl, aer_nm, errmsg, errflg)
914916 else if (iaermdl== 6 ) then
915- call aerinterpol_dl (me, master, nthrds, im, idate, &
917+ call aerinterpol_dl (mpicomm, mpirank, mpiroot, nthrds, im, idate, &
916918 fhour, iflip, jindx1_aer, jindx2_aer, &
917919 ddy_aer, iindx1_aer, &
918920 iindx2_aer, ddx_aer, &
@@ -924,7 +926,7 @@ subroutine GFS_phys_time_vary_timestep_init (
924926! > - Call gcycle() to repopulate specific time-varying surface properties for AMIP/forecast runs
925927 if (nscyc > 0 ) then
926928 if (mod (kdt,nscyc) == 1 ) THEN
927- call gcycle (me , nthrds, nx, ny, isc, jsc, nsst, tile_num, nlunit, fn_nml, &
929+ call gcycle (mpirank , nthrds, nx, ny, isc, jsc, nsst, tile_num, nlunit, fn_nml, &
928930 input_nml_file, lsoil, lsoil_lsm, kice, idate, ialb, isot, ivegsrc, &
929931 use_ufo, nst_anl, fhcyc, phour, landfrac, lakefrac, min_seaice, min_lakeice,&
930932 frac_grid, smc, slc, stc, smois, sh2o, tslb, tiice, tg3, tref, tsfc, &
0 commit comments