start_em.F
!-------------------------------------------------------------------

   SUBROUTINE start_domain_em ( grid, allowed_to_read &
! Actual arguments generated from Registry
# include "em_dummy_new_args.inc"
!
   )

   USE module_domain
   USE module_dm
!  USE module_io_domain
   USE module_state_description
   USE module_model_constants
   USE module_bc
   USE module_bc_em
!  USE module_timing
   USE module_configure
   USE module_tiles

   USE module_physics_init
#ifdef WRF_CHEM
   USE module_aerosols_sorgam, only: sum_pm_sorgam
   USE module_mosaic_driver, only: sum_pm_mosaic
#endif

#ifdef DM_PARALLEL
   USE module_dm
#endif

!!debug
!USE module_compute_geop

   USE module_model_constants
   IMPLICIT NONE
!  Input data.
   TYPE (domain)        :: grid

   LOGICAL , INTENT(IN) :: allowed_to_read

!  Definitions of dummy arguments to this routine (generated from Registry).
# include "em_dummy_new_decl.inc"

!  Structure that contains run-time configuration (namelist) data for domain
   TYPE (grid_config_rec_type) :: config_flags

!  Local data
   INTEGER ::                                    &
                  ids, ide, jds, jde, kds, kde,  &
                  ims, ime, jms, jme, kms, kme,  &
                  ips, ipe, jps, jpe, kps, kpe,  &
                  its, ite, jts, jte, kts, kte,  &
                  ij,i,j,k,ii,jj,kk,loop,error,l

   INTEGER :: i_m

   REAL    :: p00, t00, a, p_surf, pd_surf
#ifdef WRF_CHEM
   REAL    RGASUNIV ! universal gas constant [ J/mol-K ]
   PARAMETER ( RGASUNIV = 8.314510 )
   REAL,DIMENSION(grid%sm31:grid%em31,grid%sm32:grid%em32,grid%sm33:grid%em33) :: &
              z_at_w,convfac
   REAL    :: tempfac
#endif

   REAL :: qvf1, qvf2, qvf
   REAL :: MPDT
   REAL :: spongeweight
   LOGICAL :: first_trip_for_this_domain, start_of_simulation

   REAL :: lat1 , lat2 , lat3 , lat4
   REAL :: lon1 , lon2 , lon3 , lon4
   INTEGER :: num_points_lat_lon , iloc , jloc
   CHARACTER (LEN=132) :: message
!  Needed by some comm layers, e.g., RSL. If needed, em_data_calls.inc is
!  generated from the registry. The definition of REGISTER_I1 allows
!  I1 data to be communicated in this routine if necessary.
#ifdef DM_PARALLEL
# include "em_data_calls.inc"
#endif
   CALL get_ijk_from_grid ( grid ,                         &
                            ids, ide, jds, jde, kds, kde,  &
                            ims, ime, jms, jme, kms, kme,  &
                            ips, ipe, jps, jpe, kps, kpe )

#ifdef TEST_FOR_IJK_IN_RSL_LITE
   grid%xxx_1 = 0.
   grid%xxx_2 = 1.
   do k = kps, kpe
     do j = MAX(jds+1,jps), MIN(jde-2,jpe)
       do i = MAX(ids+1,ips), MIN(ide-2,ipe)
         grid%xxx_2(i,j,k) = 0.
       enddo
     enddo
   enddo
   return
#endif


   kts = kps ; kte = kpe     ! note that tile is entire patch
   its = ips ; ite = ipe     ! note that tile is entire patch
   jts = jps ; jte = jpe     ! note that tile is entire patch

   CALL model_to_grid_config_rec ( grid%id , model_config_rec , config_flags )

   IF ( ( MOD (ide-ids,config_flags%parent_grid_ratio) .NE. 0 ) .OR. &
        ( MOD (jde-jds,config_flags%parent_grid_ratio) .NE. 0 ) ) THEN
      WRITE(message, FMT='("Nested dimensions are illegal for domain ",I2,": Both &
         &MOD(",I4,"-",I1,",",I2,") and MOD(",I4,"-",I1,",",I2,") must = 0" )') &
         grid%id,ide,ids,config_flags%parent_grid_ratio,jde,jds,config_flags%parent_grid_ratio
      CALL wrf_error_fatal ( message )
   END IF
! Check that the boundary conditions are set properly.

   CALL boundary_condition_check( config_flags, bdyzone, error, grid%id )

! Kludge: we need to stop the coarse grid from resetting precip and physics
! tendencies to zero. When we are in here because a nest is being spawned,
! we still want to recompute the base state, but little else.
! This is temporary and will need to be changed when grid%itimestep is removed.

   IF ( grid%itimestep .EQ. 0 ) THEN
     first_trip_for_this_domain = .TRUE.
   ELSE
     first_trip_for_this_domain = .FALSE.
   END IF

   IF ( .not. ( config_flags%restart .or. grid%moved ) ) THEN
      grid%itimestep=0
   ENDIF

   IF ( config_flags%restart .or. grid%moved ) THEN
      first_trip_for_this_domain = .TRUE.
   ENDIF
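! (Net effect, noted for clarity: first_trip_for_this_domain ends up true on
! a cold start, on a restart, and after a domain move; itimestep is reset to
! zero except on a restart or a move.)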

   IF (config_flags%specified) THEN
!
! Arrays for specified boundary conditions
! wig: Add a combined exponential+linear weight on the mother boundaries,
!      following code changes by Ruby Leung. For the nested grid, there
!      appear to be some problems when a sponge is used: the points where
!      processors meet have problematic values.

     DO loop = grid%spec_zone + 1, grid%spec_zone + grid%relax_zone
       grid%fcx(loop) = 0.1 / grid%dt * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
       grid%gcx(loop) = 1.0 / grid%dt / 50. * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
!      spongeweight=exp(-(loop-2)/3.)
!      grid%fcx(loop) = grid%fcx(loop)*spongeweight
!      grid%gcx(loop) = grid%gcx(loop)*spongeweight
     ENDDO
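! Worked example of the weights above (comments only; assumes relax_zone > 1,
! otherwise the divisor relax_zone - 1 would be zero). With, say,
! spec_zone = 1, relax_zone = 4, and dt = 60 s:
!   loop = 2:  fcx = 0.1/60 * 3/3 ~ 1.7e-3 s-1   (row nearest the boundary)
!   loop = 5:  fcx = 0.1/60 * 0/3 = 0.           (innermost relaxation row)
! so the relaxation coefficients taper linearly to zero toward the domain
! interior, with gcx = fcx/5 row by row. The nested branch below uses the
! same formula.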

   ELSE IF (config_flags%nested) THEN
!
! Arrays for specified boundary conditions

     DO loop = grid%spec_zone + 1, grid%spec_zone + grid%relax_zone
       grid%fcx(loop) = 0.1 / grid%dt * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
       grid%gcx(loop) = 1.0 / grid%dt / 50. * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
!      spongeweight=exp(-(loop-2)/3.)
!      grid%fcx(loop) = grid%fcx(loop)*spongeweight
!      grid%gcx(loop) = grid%gcx(loop)*spongeweight
!      grid%fcx(loop) = 0.
!      grid%gcx(loop) = 0.
     ENDDO

     grid%dtbc = 0.

   ENDIF

!  IF ( grid%id .NE. 1 ) THEN

! This routine is called every time a domain starts and every time a domain
! moves. We want the center (middle) lat/lon of the grid for the metacode.
! The lat/lon values are defined at mass points. Depending on whether the SN
! and WE dimensions are even or odd, the middle is either a single point or
! an average of 2 or 4 points. In addition, we must be on the correct patch
! to retrieve each lat/lon value, and the points entering an average may not
! all lie on the same patch. Once the correct value is found, it must be kept
! around on all patches, which is where the wrf_dm_min_real calls come in.
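! Illustrative example (comments only): with ide = 100 and jde = 100 (both
! even) the center is the single mass point (50,50); with ide = 101 the
! center straddles i = 50 and i = 51, so two points are averaged; with both
! dimensions odd, four points are averaged. The 99999. fill value below
! guarantees that wrf_dm_min_real returns the value from whichever patch
! actually owns each point.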

   IF      ( ( MOD(ide,2) .EQ. 0 ) .AND. ( MOD(jde,2) .EQ. 0 ) ) THEN
      num_points_lat_lon = 1
      iloc = ide/2
      jloc = jde/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat1 = grid%xlat (iloc,jloc)
         lon1 = grid%xlong(iloc,jloc)
      ELSE
         lat1 = 99999.
         lon1 = 99999.
      END IF
      lat1 = wrf_dm_min_real ( lat1 )
      lon1 = wrf_dm_min_real ( lon1 )
      CALL nl_set_cen_lat ( grid%id , lat1 )
      CALL nl_set_cen_lon ( grid%id , lon1 )
   ELSE IF ( ( MOD(ide,2) .NE. 0 ) .AND. ( MOD(jde,2) .EQ. 0 ) ) THEN
      num_points_lat_lon = 2
      iloc = (ide-1)/2
      jloc =  jde   /2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat1 = grid%xlat (iloc,jloc)
         lon1 = grid%xlong(iloc,jloc)
      ELSE
         lat1 = 99999.
         lon1 = 99999.
      END IF
      lat1 = wrf_dm_min_real ( lat1 )
      lon1 = wrf_dm_min_real ( lon1 )

      iloc = (ide+1)/2
      jloc =  jde   /2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat2 = grid%xlat (iloc,jloc)
         lon2 = grid%xlong(iloc,jloc)
      ELSE
         lat2 = 99999.
         lon2 = 99999.
      END IF
      lat2 = wrf_dm_min_real ( lat2 )
      lon2 = wrf_dm_min_real ( lon2 )

      CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 ) * 0.50 )
      CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 ) * 0.50 )
   ELSE IF ( ( MOD(ide,2) .EQ. 0 ) .AND. ( MOD(jde,2) .NE. 0 ) ) THEN
      num_points_lat_lon = 2
      iloc =  ide   /2
      jloc = (jde-1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat1 = grid%xlat (iloc,jloc)
         lon1 = grid%xlong(iloc,jloc)
      ELSE
         lat1 = 99999.
         lon1 = 99999.
      END IF
      lat1 = wrf_dm_min_real ( lat1 )
      lon1 = wrf_dm_min_real ( lon1 )

      iloc =  ide   /2
      jloc = (jde+1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat2 = grid%xlat (iloc,jloc)
         lon2 = grid%xlong(iloc,jloc)
      ELSE
         lat2 = 99999.
         lon2 = 99999.
      END IF
      lat2 = wrf_dm_min_real ( lat2 )
      lon2 = wrf_dm_min_real ( lon2 )

      CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 ) * 0.50 )
      CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 ) * 0.50 )
   ELSE IF ( ( MOD(ide,2) .NE. 0 ) .AND. ( MOD(jde,2) .NE. 0 ) ) THEN
      num_points_lat_lon = 4
      iloc = (ide-1)/2
      jloc = (jde-1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat1 = grid%xlat (iloc,jloc)
         lon1 = grid%xlong(iloc,jloc)
      ELSE
         lat1 = 99999.
         lon1 = 99999.
      END IF
      lat1 = wrf_dm_min_real ( lat1 )
      lon1 = wrf_dm_min_real ( lon1 )

      iloc = (ide+1)/2
      jloc = (jde-1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat2 = grid%xlat (iloc,jloc)
         lon2 = grid%xlong(iloc,jloc)
      ELSE
         lat2 = 99999.
         lon2 = 99999.
      END IF
      lat2 = wrf_dm_min_real ( lat2 )
      lon2 = wrf_dm_min_real ( lon2 )

      iloc = (ide-1)/2
      jloc = (jde+1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat3 = grid%xlat (iloc,jloc)
         lon3 = grid%xlong(iloc,jloc)
      ELSE
         lat3 = 99999.
         lon3 = 99999.
      END IF
      lat3 = wrf_dm_min_real ( lat3 )
      lon3 = wrf_dm_min_real ( lon3 )

      iloc = (ide+1)/2
      jloc = (jde+1)/2
      IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
           ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
         lat4 = grid%xlat (iloc,jloc)
         lon4 = grid%xlong(iloc,jloc)
      ELSE
         lat4 = 99999.
         lon4 = 99999.
      END IF
      lat4 = wrf_dm_min_real ( lat4 )
      lon4 = wrf_dm_min_real ( lon4 )

      CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 + lat3 + lat4 ) * 0.25 )
      CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 + lon3 + lon4 ) * 0.25 )
   END IF
!  END IF

   IF ( .NOT. config_flags%restart .AND. &
        (( config_flags%input_from_hires ) .OR. ( config_flags%input_from_file ))) THEN

      IF ( config_flags%map_proj .EQ. 0 ) THEN
         CALL wrf_error_fatal ( 'start_domain: Idealized case cannot have a separate nested input file' )
      END IF

      CALL nl_get_base_pres  ( 1 , p00 )
      CALL nl_get_base_temp  ( 1 , t00 )
      CALL nl_get_base_lapse ( 1 , a   )

!     Base state potential temperature and inverse density (alpha = 1/rho) from
!     the half eta levels and the base-profile surface pressure. Compute 1/rho
!     from the equation of state. The potential temperature is a perturbation from t0.

      DO j = jts, MIN(jte,jde-1)
         DO i = its, MIN(ite,ide-1)

!           Base state pressure is a function of eta level and terrain only, plus
!           a handful of constants: p00 (sea level pressure, Pa), t00 (sea level
!           temperature, K), and A (temperature difference from 1000 mb to 300 mb, K).

            p_surf = p00 * EXP ( -t00/a + ( (t00/a)**2 - 2.*g*grid%ht(i,j)/a/r_d ) **0.5 )
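!           Sanity check of the formula above (comments only): at sea level
!           (grid%ht = 0) the square root reduces to t00/a, the exponent
!           vanishes, and p_surf = p00 exactly, as required.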

            DO k = 1, kte-1
               grid%em_pb(i,k,j) = grid%em_znu(k)*(p_surf - grid%p_top) + grid%p_top
               grid%em_t_init(i,k,j) = (t00 + A*LOG(grid%em_pb(i,k,j)/p00))*(p00/grid%em_pb(i,k,j))**(r_d/cp) - t0
               grid%em_alb(i,k,j) = (r_d/p1000mb)*(grid%em_t_init(i,k,j)+t0)*(grid%em_pb(i,k,j)/p1000mb)**cvpm
            END DO
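!           (For clarity: t_init is the base-state potential temperature
!           perturbation -- the analytic temperature t00 + A*ln(pb/p00),
!           converted to potential temperature by the (p00/pb)**(r_d/cp)
!           factor, minus the reference t0; alb is the corresponding dry
!           inverse density from the equation of state.)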

!           Base state mu is defined as base state surface pressure minus grid%p_top

            grid%em_mub(i,j) = p_surf - grid%p_top

!           Integrate base geopotential, starting at terrain elevation. This ensures that
!           the base state is in exact hydrostatic balance with respect to the model equations.
!           This field is on full levels.

            grid%em_phb(i,1,j) = grid%ht(i,j) * g
            DO k = 2,kte
               grid%em_phb(i,k,j) = grid%em_phb(i,k-1,j) - grid%em_dnw(k-1)*grid%em_mub(i,j)*grid%em_alb(i,k-1,j)
            END DO
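!           (The recurrence above is the discrete form of the hydrostatic
!           relation d(phb)/d(eta) = -mub * alb, applied with the model's own
!           finite-difference operator, hence the exact balance noted above.)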
         END DO
      END DO

   ENDIF

   IF ( .not. config_flags%restart ) THEN

!    If this is for a nested domain, the defined/interpolated fields are the _2 time level.

     IF ( first_trip_for_this_domain ) THEN

!      Data that are expected to be zero must be explicitly initialized as such.
       grid%h_diabatic = 0.

       DO j = jts,min(jte,jde-1)
         DO k = kts,kte-1
           DO i = its, min(ite,ide-1)
             IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
               grid%em_t_1(i,k,j)=grid%em_t_2(i,k,j)
             ENDIF
           ENDDO
         ENDDO
       ENDDO

       DO j = jts,min(jte,jde-1)
         DO i = its, min(ite,ide-1)
           IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
             grid%em_mu_1(i,j)=grid%em_mu_2(i,j)
           ENDIF
         ENDDO
       ENDDO
     END IF

!    Reconstitute base-state fields.

     IF(config_flags%max_dom .EQ. 1)THEN
!      With a single domain, grid%em_t_init from wrfinput is OK to use.
       DO j = jts,min(jte,jde-1)
         DO k = kts,kte-1
           DO i = its, min(ite,ide-1)
             IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
               grid%em_pb(i,k,j) = grid%em_znu(k)*grid%em_mub(i,j)+grid%p_top
               grid%em_alb(i,k,j) = (r_d/p1000mb)*(grid%em_t_init(i,k,j)+t0)*(grid%em_pb(i,k,j)/p1000mb)**cvpm
             ENDIF
           ENDDO
         ENDDO
       ENDDO
     ELSE
!      With nests, grid%em_t_init generally needs to be recomputed, since it is not interpolated.
       DO j = jts,min(jte,jde-1)
         DO k = kts,kte-1
           DO i = its, min(ite,ide-1)
             IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
               grid%em_pb(i,k,j) = grid%em_znu(k)*grid%em_mub(i,j)+grid%p_top
               grid%em_alb(i,k,j) = -grid%em_rdnw(k)*(grid%em_phb(i,k+1,j)-grid%em_phb(i,k,j))/grid%em_mub(i,j)
               grid%em_t_init(i,k,j) = grid%em_alb(i,k,j)*(p1000mb/r_d)/((grid%em_pb(i,k,j)/p1000mb)**cvpm) - t0
             ENDIF
           ENDDO
         ENDDO
       ENDDO
     ENDIF

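!    The loops below recompute the perturbation dry pressure p, total inverse
!    density alt, and perturbation inverse density al by integrating the
!    moist hydrostatic relation downward from the model top. The top
!    half-level is handled separately via rdnw (so the qv "average" there
!    degenerates to the single top-level value); interior levels average qv
!    between levels k and k+1 and use rdn.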
     DO j = jts,min(jte,jde-1)

       k = kte-1
       DO i = its, min(ite,ide-1)
         IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
           qvf1 = 0.5*(moist(i,k,j,P_QV)+moist(i,k,j,P_QV))
           qvf2 = 1./(1.+qvf1)
           qvf1 = qvf1*qvf2
           grid%em_p(i,k,j) = - 0.5*(grid%em_mu_1(i,j)+qvf1*grid%em_mub(i,j))/grid%em_rdnw(k)/qvf2
           qvf = 1. + rvovrd*moist(i,k,j,P_QV)
           grid%em_alt(i,k,j) = (r_d/p1000mb)*(grid%em_t_1(i,k,j)+t0)*qvf*(((grid%em_p(i,k,j)+grid%em_pb(i,k,j))/p1000mb)**cvpm)
           grid%em_al(i,k,j) = grid%em_alt(i,k,j) - grid%em_alb(i,k,j)
         ENDIF
       ENDDO

       DO k = kte-2, 1, -1
         DO i = its, min(ite,ide-1)
           IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
             qvf1 = 0.5*(moist(i,k,j,P_QV)+moist(i,k+1,j,P_QV))
             qvf2 = 1./(1.+qvf1)
             qvf1 = qvf1*qvf2
             grid%em_p(i,k,j) = grid%em_p(i,k+1,j) - (grid%em_mu_1(i,j) + qvf1*grid%em_mub(i,j))/qvf2/grid%em_rdn(k+1)
             qvf = 1. + rvovrd*moist(i,k,j,P_QV)
             grid%em_alt(i,k,j) = (r_d/p1000mb)*(grid%em_t_1(i,k,j)+t0)*qvf* &
                         (((grid%em_p(i,k,j)+grid%em_pb(i,k,j))/p1000mb)**cvpm)
             grid%em_al(i,k,j) = grid%em_alt(i,k,j) - grid%em_alb(i,k,j)
           ENDIF
         ENDDO
       ENDDO

     ENDDO

   ENDIF

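! (For clarity: the block below, active only for nests initialized from
! their own input, hydrostatically adjusts the perturbation dry column mass
! mu_2 for the elevation difference between the two terrain fields,
! grid%ht - grid%ht_fine.)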
   IF ( ( grid%id .NE. 1 ) .AND. .NOT. ( config_flags%restart ) .AND. &
        ( ( config_flags%input_from_hires ) .OR. ( config_flags%input_from_file ) ) ) THEN
      DO j = jts, MIN(jte,jde-1)
         DO i = its, MIN(ite,ide-1)
            grid%em_mu_2(i,j) = grid%em_mu_2(i,j) + grid%em_al(i,1,j) / ( grid%em_alt(i,1,j) * grid%em_alb(i,1,j) ) * &
                                g * ( grid%ht(i,j) - grid%ht_fine(i,j) )
         END DO
      END DO
      DO j = jts,min(jte,jde-1)
        DO i = its, min(ite,ide-1)
          grid%em_mu_1(i,j)=grid%em_mu_2(i,j)
        ENDDO
      ENDDO

   END IF

   IF ( first_trip_for_this_domain ) THEN

   CALL wrf_debug ( 100 , 'module_start: start_domain_rk: Before call to phy_init' )

!  The namelist variable MPDT does not exist yet, so set it here.
!  MPDT is the call frequency for microphysics in minutes (0 means every step).
   MPDT = 0.

!  Set the GMT outside of phy_init, because phy_init may not be called on this
!  process if, for example, this is a moving nest and this part of the domain
!  is not being initialized (it is not the leading edge).
   CALL domain_setgmtetc( grid, start_of_simulation )

   CALL set_tiles ( grid , grid%imask_nostag, ims, ime, jms, jme, ips, ipe, jps, jpe )

!  Phy_init is not necessarily thread-safe; do not multi-thread this loop.
!  The tiling is to handle the fact that we may be masking off part of the computation.
   DO ij = 1, grid%num_tiles

     CALL phy_init ( grid%id , config_flags, grid%DT, grid%RESTART, grid%em_znw, grid%em_znu, &
                     grid%p_top, grid%tsk, grid%RADT,grid%BLDT,grid%CUDT, MPDT,               &
                     grid%rthcuten, grid%rqvcuten, grid%rqrcuten,                             &
                     grid%rqccuten, grid%rqscuten, grid%rqicuten,                             &
                     grid%rublten,grid%rvblten,grid%rthblten,                                 &
                     grid%rqvblten,grid%rqcblten,grid%rqiblten,                               &
                     grid%rthraten,grid%rthratenlw,grid%rthratensw,                           &
                     grid%stepbl,grid%stepra,grid%stepcu,                                     &
                     grid%w0avg, grid%rainnc, grid%rainc, grid%raincv, grid%rainncv,          &
                     grid%nca,grid%swrad_scat,                                                &
                     grid%cldefi,grid%lowlyr,                                                 &
                     grid%mass_flux,                                                          &
                     grid%rthften, grid%rqvften,                                              &
                     grid%cldfra,grid%glw,grid%gsw,grid%emiss,grid%lu_index,                  &
                     grid%landuse_ISICE, grid%landuse_LUCATS,                                 &
                     grid%landuse_LUSEAS, grid%landuse_ISN,                                   &
                     grid%lu_state,                                                           &
                     grid%xlat,grid%xlong,grid%albedo,grid%albbck,grid%GMT,grid%JULYR,grid%JULDAY, &
                     grid%levsiz, num_ozmixm, num_aerosolc, grid%paerlev,                     &
                     grid%tmn,grid%xland,grid%znt,grid%z0,grid%ust,grid%mol,grid%pblh,grid%tke_myj, &
                     grid%exch_h,grid%thc,grid%snowc,grid%mavail,grid%hfx,grid%qfx,grid%rainbl, &
                     grid%tslb,grid%zs,grid%dzs,config_flags%num_soil_layers,grid%warm_rain,  &
                     grid%adv_moist_cond,                                                     &
                     grid%apr_gr,grid%apr_w,grid%apr_mc,grid%apr_st,grid%apr_as,              &
                     grid%apr_capma,grid%apr_capme,grid%apr_capmi,                            &
                     grid%xice,grid%vegfra,grid%snow,grid%canwat,grid%smstav,                 &
                     grid%smstot, grid%sfcrunoff,grid%udrunoff,grid%grdflx,grid%acsnow,       &
                     grid%acsnom,grid%ivgtyp,grid%isltyp, grid%sfcevp,grid%smois,             &
                     grid%sh2o, grid%snowh, grid%smfr3d,                                      &
                     grid%DX,grid%DY,grid%f_ice_phy,grid%f_rain_phy,grid%f_rimef_phy,         &
                     grid%mp_restart_state,grid%tbpvs_state,grid%tbpvs0_state,                &
                     allowed_to_read, grid%moved, start_of_simulation,                        &
                     ids, ide, jds, jde, kds, kde,                                            &
                     ims, ime, jms, jme, kms, kme,                                            &
                     grid%i_start(ij), grid%i_end(ij), grid%j_start(ij), grid%j_end(ij), kts, kte, &
                     ozmixm,grid%pin,                                                         & ! Optional
                     grid%m_ps_1,grid%m_ps_2,grid%m_hybi,aerosolc_1,aerosolc_2,               & ! Optional
                     grid%rundgdten,grid%rvndgdten,grid%rthndgdten,                           & ! Optional
                     grid%rqvndgdten,grid%rmundgdten,                                         & ! Optional
                     grid%FGDT,grid%stepfg,                                                   & ! Optional
                     grid%DZR, grid%DZB, grid%DZG,                                            & ! Optional urban
                     grid%TR_URB2D,grid%TB_URB2D,grid%TG_URB2D,grid%TC_URB2D,                 & ! Optional urban
                     grid%QC_URB2D, grid%XXXR_URB2D,grid%XXXB_URB2D,                          & ! Optional urban
                     grid%XXXG_URB2D, grid%XXXC_URB2D,                                        & ! Optional urban
                     grid%TRL_URB3D, grid%TBL_URB3D, grid%TGL_URB3D,                          & ! Optional urban
                     grid%SH_URB2D, grid%LH_URB2D, grid%G_URB2D, grid%RN_URB2D,               & ! Optional urban
                     grid%TS_URB2D, grid%FRC_URB2D, grid%UTYPE_URB2D,                         & ! Optional urban
                     itimestep=grid%itimestep, fdob=grid%fdob                                 &
                     )

   ENDDO



   CALL wrf_debug ( 100 , 'module_start: start_domain_rk: After call to phy_init' )

#ifdef MCELIO
   LU_MASK = 0.
   WHERE ( grid%lu_index .EQ. 16 ) LU_MASK = 1.
#endif

   END IF

#if 0
#include "CYCLE_TEST.inc"
#endif

!
!

! set physical boundary conditions for all initialized variables

!-----------------------------------------------------------------------
!  Stencils for patch communications  (WCS, 29 June 2001)
!  Note: the size of this halo exchange reflects the
!  fact that we are carrying the uncoupled variables
!  as state variables in the mass coordinate model, as
!  opposed to the coupled variables as in the height
!  coordinate model.
!
!                           * * * * *
!         *        * * *    * * * * *
!       * + *      * + *    * * + * *
!         *        * * *    * * * * *
!                           * * * * *
!
!j  grid%em_u_1        x
!j  grid%em_u_2        x
!j  grid%em_v_1        x
!j  grid%em_v_2        x
!j  grid%em_w_1        x
!j  grid%em_w_2        x
!j  grid%em_t_1        x
!j  grid%em_t_2        x
!j  grid%em_ph_1       x
!j  grid%em_ph_2       x
!
!j  grid%em_t_init     x
!
!j  grid%em_phb        x
!j  grid%em_ph0        x
!j  grid%em_php        x
!j  grid%em_pb         x
!j  grid%em_al         x
!j  grid%em_alt        x
!j  grid%em_alb        x
!
!  the following are 2D (xy) variables
!
!j  grid%em_mu_1       x
!j  grid%em_mu_2       x
!j  grid%em_mub        x
!j  grid%em_mu0        x
!j  grid%ht            x
!j  grid%msft          x
!j  grid%msfu          x
!j  grid%msfv          x
!j  grid%sina          x
!j  grid%cosa          x
!j  grid%e             x
!j  grid%f             x
!
!  4D variables
!
!   moist              x
!   chem               x
!   scalar             x

!--------------------------------------------------------------

#ifdef DM_PARALLEL
# include "HALO_EM_INIT_1.inc"
# include "HALO_EM_INIT_2.inc"
# include "HALO_EM_INIT_3.inc"
# include "HALO_EM_INIT_4.inc"
# include "HALO_EM_INIT_5.inc"
# include "PERIOD_BDY_EM_INIT.inc"
# include "PERIOD_BDY_EM_MOIST.inc"
# include "PERIOD_BDY_EM_CHEM.inc"
#endif


   CALL set_physical_bc3d( grid%em_u_1 , 'U' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_u_2 , 'U' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_v_1 , 'V' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_v_2 , 'V' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

! set kinematic condition for w

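! (The kinematic condition sets the surface vertical velocity from the
! horizontal wind and the terrain slope, roughly w_sfc ~ u*dh/dx + v*dh/dy;
! the cf1/cf2/cf3 coefficients extrapolate u and v to the surface.)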
   CALL set_physical_bc2d( grid%ht , 'r' , config_flags ,          &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )

   IF ( .not. config_flags%restart ) THEN
     CALL set_w_surface( config_flags,                                        &
                         grid%em_w_1, grid%ht, grid%em_u_1, grid%em_v_1, grid%cf1, &
                         grid%cf2, grid%cf3, grid%rdx, grid%rdy, grid%msft,   &
                         ids, ide, jds, jde, kds, kde,                        &
                         ips, ipe, jps, jpe, kps, kpe,                        &
                         its, ite, jts, jte, kts, kte,                        &
                         ims, ime, jms, jme, kms, kme )
     CALL set_w_surface( config_flags,                                        &
                         grid%em_w_2, grid%ht, grid%em_u_2, grid%em_v_2, grid%cf1, &
                         grid%cf2, grid%cf3, grid%rdx, grid%rdy, grid%msft,   &
                         ids, ide, jds, jde, kds, kde,                        &
                         ips, ipe, jps, jpe, kps, kpe,                        &
                         its, ite, jts, jte, kts, kte,                        &
                         ims, ime, jms, jme, kms, kme )
   END IF

! finished setting kinematic condition for w at the surface

   CALL set_physical_bc3d( grid%em_w_1 , 'W' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_w_2 , 'W' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_ph_1 , 'W' , config_flags ,     &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_ph_2 , 'W' , config_flags ,     &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_t_1 , 't' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_t_2 , 't' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc2d( grid%em_mu_1, 't' , config_flags ,      &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%em_mu_2, 't' , config_flags ,      &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%em_mub , 't' , config_flags ,      &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%em_mu0 , 't' , config_flags ,      &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )


   CALL set_physical_bc3d( grid%em_phb , 'W' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_ph0 , 'W' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_php , 'W' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   CALL set_physical_bc3d( grid%em_pb , 't' , config_flags ,       &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_al , 't' , config_flags ,       &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_alt , 't' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d( grid%em_alb , 't' , config_flags ,      &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )
   CALL set_physical_bc3d(grid%em_t_init, 't' , config_flags ,     &
                           ids , ide , jds , jde , kds , kde ,     &
                           ims , ime , jms , jme , kms , kme ,     &
                           its , ite , jts , jte , kts , kte ,     &
                           its , ite , jts , jte , kts , kte )

   IF (num_moist > 0) THEN

! use of (:,:,:,loop) not efficient on DEC, but (ims,kms,jms,loop) not portable to SGI/Cray

      loop_3d_m : DO loop = 1 , num_moist
         CALL set_physical_bc3d( moist(:,:,:,loop) , 'r' , config_flags , &
                                 ids , ide , jds , jde , kds , kde ,      &
                                 ims , ime , jms , jme , kms , kme ,      &
                                 its , ite , jts , jte , kts , kte ,      &
                                 its , ite , jts , jte , kts , kte )
      END DO loop_3d_m

   ENDIF

#ifdef WRF_CHEM
!  Zero out the Mie arrays so they do not cause instabilities when not calling Fast-J.
   if( .NOT. config_flags%restart ) then
     do j=jts,jte
       do k=kts,kte
         do i=its,ite
           grid%tauaer1(i,k,j) = 0.
           grid%tauaer2(i,k,j) = 0.
           grid%tauaer3(i,k,j) = 0.
           grid%tauaer4(i,k,j) = 0.
           grid%gaer1(i,k,j) = 0.
           grid%gaer2(i,k,j) = 0.
           grid%gaer3(i,k,j) = 0.
           grid%gaer4(i,k,j) = 0.
           grid%waer1(i,k,j) = 0.
           grid%waer2(i,k,j) = 0.
           grid%waer3(i,k,j) = 0.
           grid%waer4(i,k,j) = 0.
         end do
       end do
     end do
   end if
#endif

   IF (num_chem >= PARAM_FIRST_SCALAR ) THEN
#ifdef WRF_CHEM
!
!    We do this here so that we need only one chem_init routine for either core.
!
     do j=jts,min(jte,jde-1)
        do i=its,min(ite,ide-1)
           do k=kts,kte
             z_at_w(i,k,j)=(grid%em_ph_2(i,k,j)+grid%em_phb(i,k,j))/g
           enddo
           do k=kts,min(kte,kde-1)
             tempfac=(grid%em_t_1(i,k,j) + t0)*((grid%em_p(i,k,j) + grid%em_pb(i,k,j))/p1000mb)**rcp
             convfac(i,k,j) = (grid%em_p(i,k,j)+grid%em_pb(i,k,j))/rgasuniv/tempfac
           enddo
        enddo
     enddo
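!    (For reference: z_at_w is the geopotential height of the full (w)
!    levels, and convfac = p/(RGASUNIV*T) is the molar air density in
!    mol m-3, which chem_init can use, e.g., to convert between mixing
!    ratios and concentrations.)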

     CALL chem_init (grid%id,chem,grid%dt,grid%bioemdt,grid%photdt,       &
                     grid%chemdt,                                         &
                     grid%stepbioe,grid%stepphot,grid%stepchem,           &
                     z_at_w,g,grid%aerwrf,config_flags,                   &
                     grid%em_alt,grid%em_t_1,grid%em_p,convfac,           &
                     grid%tauaer1,grid%tauaer2,grid%tauaer3,grid%tauaer4, &
                     grid%gaer1,grid%gaer2,grid%gaer3,grid%gaer4,         &
                     grid%waer1,grid%waer2,grid%waer3,grid%waer4,         &
                     grid%pm2_5_dry,grid%pm2_5_water,grid%pm2_5_dry_ec,   &
                     grid%chem_in_opt,                                    &
                     ids , ide , jds , jde , kds , kde ,                  &
                     ims , ime , jms , jme , kms , kme ,                  &
                     its , ite , jts , jte , kts , kte )

!
!    calculate initial pm
!
!    print *,'calculating initial pm'
     select case (config_flags%chem_opt)
     case (RADM2SORG, RACMSORG)
       call sum_pm_sorgam (                                               &
            grid%em_alt, chem, grid%h2oaj, grid%h2oai,                    &
            grid%pm2_5_dry, grid%pm2_5_water, grid%pm2_5_dry_ec, grid%pm10, &
            ids,ide, jds,jde, kds,kde,                                    &
            ims,ime, jms,jme, kms,kme,                                    &
            its,ite, jts,jte, kts,kte )

     case (CBMZ_MOSAIC_AA, CBMZ_MOSAIC_BB)
       call sum_pm_mosaic (                                               &
            grid%em_alt, chem,                                            &
            grid%pm2_5_dry, grid%pm2_5_water, grid%pm2_5_dry_ec, grid%pm10, &
            ids,ide, jds,jde, kds,kde,                                    &
            ims,ime, jms,jme, kms,kme,                                    &
            its,ite, jts,jte, kts,kte )

     case default
       do j=jts,min(jte,jde-1)
         do k=kts,min(kte,kde-1)
           do i=its,min(ite,ide-1)
             grid%pm2_5_dry(i,k,j)    = 0.
             grid%pm2_5_water(i,k,j)  = 0.
             grid%pm2_5_dry_ec(i,k,j) = 0.
             grid%pm10(i,k,j)         = 0.
           enddo
         enddo
       enddo
     end select
#endif

! use of (:,:,:,loop) not efficient on DEC, but (ims,kms,jms,loop) not portable to SGI/Cray

     loop_3d_c : DO loop = PARAM_FIRST_SCALAR , num_chem
       CALL set_physical_bc3d( chem(:,:,:,loop) , 'r' , config_flags , &
                               ids , ide , jds , jde , kds , kde ,     &
                               ims , ime , jms , jme , kms , kme ,     &
                               its , ite , jts , jte , kts , kte ,     &
                               its , ite , jts , jte , kts , kte )
     END DO loop_3d_c

   ENDIF

   CALL set_physical_bc2d( grid%msft , 'r' , config_flags ,        &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%msfu , 'x' , config_flags ,        &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%msfv , 'y' , config_flags ,        &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%sina , 'r' , config_flags ,        &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%cosa , 'r' , config_flags ,        &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%e , 'r' , config_flags ,           &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )
   CALL set_physical_bc2d( grid%f , 'r' , config_flags ,           &
                           ids , ide , jds , jde ,                 &
                           ims , ime , jms , jme ,                 &
                           its , ite , jts , jte ,                 &
                           its , ite , jts , jte   )

#ifdef DM_PARALLEL
# include "HALO_EM_INIT_1.inc"
# include "HALO_EM_INIT_2.inc"
# include "HALO_EM_INIT_3.inc"
# include "HALO_EM_INIT_4.inc"
# include "HALO_EM_INIT_5.inc"
# include "PERIOD_BDY_EM_INIT.inc"
# include "PERIOD_BDY_EM_MOIST.inc"
# include "PERIOD_BDY_EM_CHEM.inc"
#endif

   CALL wrf_debug ( 100 , 'module_start: start_domain_rk: Returning' )

   RETURN

   END SUBROUTINE start_domain_em