start_em.F
1 !-------------------------------------------------------------------
2
3 SUBROUTINE start_domain_em ( grid, allowed_to_read &
4 ! Actual arguments generated from Registry
5 # include "em_dummy_new_args.inc"
6 !
7 )
8
9 USE module_domain
10 USE module_dm
11 ! USE module_io_domain
12 USE module_state_description
13 USE module_model_constants
14 USE module_bc
15 USE module_bc_em
16 ! USE module_timing
17 USE module_configure
18 USE module_tiles
19
20 USE module_physics_init
21 #ifdef WRF_CHEM
22 USE module_aerosols_sorgam, only: sum_pm_sorgam
23 USE module_mosaic_driver, only: sum_pm_mosaic
24 #endif
25
26 #ifdef DM_PARALLEL
27 USE module_dm
28 #endif
29
30 !!debug
31 !USE module_compute_geop
32
33 USE module_model_constants
34 IMPLICIT NONE
35 ! Input data.
36 TYPE (domain) :: grid
37
38 LOGICAL , INTENT(IN) :: allowed_to_read
39
40 ! Definitions of dummy arguments to this routine (generated from Registry).
41 # include "em_dummy_new_decl.inc"
42
43 ! Structure that contains run-time configuration (namelist) data for domain
44 TYPE (grid_config_rec_type) :: config_flags
45
46 ! Local data
47 INTEGER :: &
48 ids, ide, jds, jde, kds, kde, &
49 ims, ime, jms, jme, kms, kme, &
50 ips, ipe, jps, jpe, kps, kpe, &
51 its, ite, jts, jte, kts, kte, &
52 ij,i,j,k,ii,jj,kk,loop,error,l
53
54 INTEGER :: i_m
55
56 REAL :: p00, t00, a, p_surf, pd_surf
57 #ifdef WRF_CHEM
58 REAL RGASUNIV ! universal gas constant [ J/mol-K ]
59 PARAMETER ( RGASUNIV = 8.314510 )
60 REAL,DIMENSION(grid%sm31:grid%em31,grid%sm32:grid%em32,grid%sm33:grid%em33) :: &
61 z_at_w,convfac
62 REAL :: tempfac
63 #endif
64
65 REAL :: qvf1, qvf2, qvf
66 REAL :: MPDT
67 REAL :: spongeweight
68 LOGICAL :: first_trip_for_this_domain, start_of_simulation
69 #ifndef WRF_CHEM
70 REAL,ALLOCATABLE,DIMENSION(:,:,:) :: cldfra_old
71 #endif
72
73 REAL :: lat1 , lat2 , lat3 , lat4
74 REAL :: lon1 , lon2 , lon3 , lon4
75 INTEGER :: num_points_lat_lon , iloc , jloc
76 CHARACTER (LEN=132) :: message
77
78 ! Needed by some comm layers, e.g. RSL. If needed, em_data_calls.inc is
79 ! generated from the Registry. The definition of REGISTER_I1 allows
80 ! I1 data to be communicated in this routine if necessary.
81 #ifdef DM_PARALLEL
82 # include "em_data_calls.inc"
83 #endif
84 CALL get_ijk_from_grid ( grid , &
85 ids, ide, jds, jde, kds, kde, &
86 ims, ime, jms, jme, kms, kme, &
87 ips, ipe, jps, jpe, kps, kpe )
88
89 #ifdef TEST_FOR_IJK_IN_RSL_LITE
90 grid%xxx_1 = 0.
91 grid%xxx_2 = 1.
92 do k = kps, kpe
93 do j = MAX(jds+1,jps), MIN(jde-2,jpe)
94 do i = MAX(ids+1,ips), MIN(ide-2,ipe)
95 grid%xxx_2(i,j,k) = 0.
96 enddo
97 enddo
98 enddo
99 return
100 #endif
101
102
103 kts = kps ; kte = kpe ! note that tile is entire patch
104 its = ips ; ite = ipe ! note that tile is entire patch
105 jts = jps ; jte = jpe ! note that tile is entire patch
106 #ifndef WRF_CHEM
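! When WRF_CHEM is not defined, cldfra_old is not carried as a state variable, so a
! local scratch array is allocated here (and deallocated at the end of this routine)
! to be passed to phy_init.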
107 ALLOCATE(CLDFRA_OLD(IMS:IME,KMS:KME,JMS:JME),STAT=I) ; CLDFRA_OLD = 0.
108 #endif
109 CALL model_to_grid_config_rec ( grid%id , model_config_rec , config_flags )
110
111 IF ( ( MOD (ide-ids,config_flags%parent_grid_ratio) .NE. 0 ) .OR. &
112 ( MOD (jde-jds,config_flags%parent_grid_ratio) .NE. 0 ) ) THEN
113 WRITE(message, FMT='("Nested dimensions are illegal for domain ",I2,": Both &
114 &MOD(",I4,"-",I1,",",I2,") and MOD(",I4,"-",I1,",",I2,") must = 0" )') &
115 grid%id,ide,ids,config_flags%parent_grid_ratio,jde,jds,config_flags%parent_grid_ratio
116 CALL wrf_error_fatal ( message )
117 END IF
118
119 ! here we check to see if the boundary conditions are set properly
120
121 CALL boundary_condition_check( config_flags, bdyzone, error, grid%id )
122
123 ! Kludge: we need to stop the coarse grid from resetting precip and physics tendencies
124 ! to zero. When we are in here because a nest is being spawned, we still want to
125 ! recompute the base state, but that is about it.
126 ! This is temporary and will need to be changed when grid%itimestep is removed.
127
128 IF ( grid%itimestep .EQ. 0 ) THEN
129 first_trip_for_this_domain = .TRUE.
130 ELSE
131 first_trip_for_this_domain = .FALSE.
132 END IF
133
134 IF ( .not. ( config_flags%restart .or. grid%moved ) ) THEN
135 grid%itimestep=0
136 ENDIF
137
138 IF ( config_flags%restart .or. grid%moved ) THEN
139 first_trip_for_this_domain = .TRUE.
140 ENDIF
141
142 IF (config_flags%specified) THEN
143 !
144 ! Arrays for specified boundary conditions
145 ! wig: Add a combined exponential+linear weight on the mother domain boundaries,
146 ! following code changes by Ruby Leung. For the nested grid, there
147 ! appear to be some problems when a sponge is used: the points where
148 ! processors meet take on problematic values.
149
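! For illustration (values not from the source): the weights below ramp linearly from
! their maximum at the outer edge of the relaxation zone to zero at its inner edge.
! With, say, spec_zone = 1, relax_zone = 4 and dt = 60 s, loop runs 2..5 and
!   fcx = (0.1/60) * (5-loop)/3  ->  1.7e-3, 1.1e-3, 5.6e-4, 0.0   [1/s]
!   gcx = fcx/5                  ->  3.3e-4, 2.2e-4, 1.1e-4, 0.0   [1/s]
! so gcx is always one fifth of fcx for this linear weighting.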
150 DO loop = grid%spec_zone + 1, grid%spec_zone + grid%relax_zone
151 grid%fcx(loop) = 0.1 / grid%dt * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
152 grid%gcx(loop) = 1.0 / grid%dt / 50. * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
153 ! spongeweight=exp(-(loop-2)/3.)
154 ! grid%fcx(loop) = grid%fcx(loop)*spongeweight
155 ! grid%gcx(loop) = grid%gcx(loop)*spongeweight
156 ENDDO
157
158 ELSE IF (config_flags%nested) THEN
159 !
160 ! Arrays for nested boundary conditions
161
162 DO loop = grid%spec_zone + 1, grid%spec_zone + grid%relax_zone
163 grid%fcx(loop) = 0.1 / grid%dt * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
164 grid%gcx(loop) = 1.0 / grid%dt / 50. * (grid%spec_zone + grid%relax_zone - loop) / (grid%relax_zone - 1)
165 ! spongeweight=exp(-(loop-2)/3.)
166 ! grid%fcx(loop) = grid%fcx(loop)*spongeweight
167 ! grid%gcx(loop) = grid%gcx(loop)*spongeweight
168 ! grid%fcx(loop) = 0.
169 ! grid%gcx(loop) = 0.
170 ENDDO
171
172 grid%dtbc = 0.
173
174 ENDIF
175
176 IF ( ( grid%id .NE. 1 ) .AND. ( .NOT. config_flags%input_from_file ) ) THEN
177
178 ! This routine is called every time a domain starts or moves. We want the center
179 ! (middle) lat/lon of the grid for the domain metadata. The lat/lon values are
180 ! defined at mass points. Depending on whether the numbers of points in the SN and
181 ! WE directions are even or odd, the middle is either a single point or an average of 2 or 4 points.
182 ! Add to this the need to make sure that we are on the correct patch to retrieve the
183 ! lat/lon value, AND that the lat/lons entering an average may not all be on the same
184 ! patch. Once we find the correct lat/lon, we need to keep it around on all patches,
185 ! which is what the wrf_dm_min_real calls are for.
186 ! If this is the coarsest domain, we do not come in here. Also, if there is an input file
187 ! (which has the right values for the middle lat/lon), we do not enter this IF test.
188
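! Summary of the four cases handled below:
!   ide even, jde even -> the single mass point (ide/2, jde/2) is the center
!   ide odd,  jde even -> average the two points ((ide-1)/2, jde/2) and ((ide+1)/2, jde/2)
!   ide even, jde odd  -> average the two points (ide/2, (jde-1)/2) and (ide/2, (jde+1)/2)
!   ide odd,  jde odd  -> average the four surrounding mass points
! A patch that does not own a requested point reports 99999.; wrf_dm_min_real then
! returns the true (smaller) value on every patch.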
189 IF ( ( MOD(ide,2) .EQ. 0 ) .AND. ( MOD(jde,2) .EQ. 0 ) ) THEN
190 num_points_lat_lon = 1
191 iloc = ide/2
192 jloc = jde/2
193 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
194 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
195 lat1 = grid%xlat (iloc,jloc)
196 lon1 = grid%xlong(iloc,jloc)
197 ELSE
198 lat1 = 99999.
199 lon1 = 99999.
200 END IF
201 lat1 = wrf_dm_min_real ( lat1 )
202 lon1 = wrf_dm_min_real ( lon1 )
203 CALL nl_set_cen_lat ( grid%id , lat1 )
204 CALL nl_set_cen_lon ( grid%id , lon1 )
205 ELSE IF ( ( MOD(ide,2) .NE. 0 ) .AND. ( MOD(jde,2) .EQ. 0 ) ) THEN
206 num_points_lat_lon = 2
207 iloc = (ide-1)/2
208 jloc = jde /2
209 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
210 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
211 lat1 = grid%xlat (iloc,jloc)
212 lon1 = grid%xlong(iloc,jloc)
213 ELSE
214 lat1 = 99999.
215 lon1 = 99999.
216 END IF
217 lat1 = wrf_dm_min_real ( lat1 )
218 lon1 = wrf_dm_min_real ( lon1 )
219
220 iloc = (ide+1)/2
221 jloc = jde /2
222 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
223 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
224 lat2 = grid%xlat (iloc,jloc)
225 lon2 = grid%xlong(iloc,jloc)
226 ELSE
227 lat2 = 99999.
228 lon2 = 99999.
229 END IF
230 lat2 = wrf_dm_min_real ( lat2 )
231 lon2 = wrf_dm_min_real ( lon2 )
232
233 CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 ) * 0.50 )
234 CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 ) * 0.50 )
235 ELSE IF ( ( MOD(ide,2) .EQ. 0 ) .AND. ( MOD(jde,2) .NE. 0 ) ) THEN
236 num_points_lat_lon = 2
237 iloc = ide /2
238 jloc = (jde-1)/2
239 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
240 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
241 lat1 = grid%xlat (iloc,jloc)
242 lon1 = grid%xlong(iloc,jloc)
243 ELSE
244 lat1 = 99999.
245 lon1 = 99999.
246 END IF
247 lat1 = wrf_dm_min_real ( lat1 )
248 lon1 = wrf_dm_min_real ( lon1 )
249
250 iloc = ide /2
251 jloc = (jde+1)/2
252 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
253 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
254 lat2 = grid%xlat (iloc,jloc)
255 lon2 = grid%xlong(iloc,jloc)
256 ELSE
257 lat2 = 99999.
258 lon2 = 99999.
259 END IF
260 lat2 = wrf_dm_min_real ( lat2 )
261 lon2 = wrf_dm_min_real ( lon2 )
262
263 CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 ) * 0.50 )
264 CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 ) * 0.50 )
265 ELSE IF ( ( MOD(ide,2) .NE. 0 ) .AND. ( MOD(jde,2) .NE. 0 ) ) THEN
266 num_points_lat_lon = 4
267 iloc = (ide-1)/2
268 jloc = (jde-1)/2
269 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
270 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
271 lat1 = grid%xlat (iloc,jloc)
272 lon1 = grid%xlong(iloc,jloc)
273 ELSE
274 lat1 = 99999.
275 lon1 = 99999.
276 END IF
277 lat1 = wrf_dm_min_real ( lat1 )
278 lon1 = wrf_dm_min_real ( lon1 )
279
280 iloc = (ide+1)/2
281 jloc = (jde-1)/2
282 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
283 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
284 lat2 = grid%xlat (iloc,jloc)
285 lon2 = grid%xlong(iloc,jloc)
286 ELSE
287 lat2 = 99999.
288 lon2 = 99999.
289 END IF
290 lat2 = wrf_dm_min_real ( lat2 )
291 lon2 = wrf_dm_min_real ( lon2 )
292
293 iloc = (ide-1)/2
294 jloc = (jde+1)/2
295 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
296 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
297 lat3 = grid%xlat (iloc,jloc)
298 lon3 = grid%xlong(iloc,jloc)
299 ELSE
300 lat3 = 99999.
301 lon3 = 99999.
302 END IF
303 lat3 = wrf_dm_min_real ( lat3 )
304 lon3 = wrf_dm_min_real ( lon3 )
305
306 iloc = (ide+1)/2
307 jloc = (jde+1)/2
308 IF ( ( ips .LE. iloc ) .AND. ( ipe .GE. iloc ) .AND. &
309 ( jps .LE. jloc ) .AND. ( jpe .GE. jloc ) ) THEN
310 lat4 = grid%xlat (iloc,jloc)
311 lon4 = grid%xlong(iloc,jloc)
312 ELSE
313 lat4 = 99999.
314 lon4 = 99999.
315 END IF
316 lat4 = wrf_dm_min_real ( lat4 )
317 lon4 = wrf_dm_min_real ( lon4 )
318
319 CALL nl_set_cen_lat ( grid%id , ( lat1 + lat2 + lat3 + lat4 ) * 0.25 )
320 CALL nl_set_cen_lon ( grid%id , ( lon1 + lon2 + lon3 + lon4 ) * 0.25 )
321 END IF
322 END IF
323
324 IF ( .NOT. config_flags%restart .AND. &
325 (( config_flags%input_from_hires ) .OR. ( config_flags%input_from_file ))) THEN
326
327 IF ( config_flags%map_proj .EQ. 0 ) THEN
328 CALL wrf_error_fatal ( 'start_domain: Idealized case cannot have a separate nested input file' )
329 END IF
330
331 CALL nl_get_base_pres ( 1 , p00 )
332 CALL nl_get_base_temp ( 1 , t00 )
333 CALL nl_get_base_lapse ( 1 , a )
334
335 ! Base state potential temperature and inverse density (alpha = 1/rho) from
336 ! the half eta levels and the base-profile surface pressure. Compute 1/rho
337 ! from equation of state. The potential temperature is a perturbation from t0.
338
339 DO j = jts, MIN(jte,jde-1)
340 DO i = its, MIN(ite,ide-1)
341
342 ! Base state pressure is a function of eta level and terrain only, plus
343 ! a handful of constants: p00 (sea level pressure, Pa), t00 (sea level
344 ! temperature, K), and A (temperature difference from 1000 mb to 300 mb, K).
345
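! For reference: the expression below follows from assuming T(p) = t00 + A*ln(p/p00)
! and integrating the hydrostatic relation dz = -(r_d*T/g)*dp/p from sea level (p00)
! up to the terrain height, which gives
!   p_surf = p00 * exp( -t00/A + sqrt( (t00/A)**2 - 2*g*ht/(A*r_d) ) )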
346 p_surf = p00 * EXP ( -t00/a + ( (t00/a)**2 - 2.*g*grid%ht(i,j)/a/r_d ) **0.5 )
347
348 DO k = 1, kte-1
349 grid%em_pb(i,k,j) = grid%em_znu(k)*(p_surf - grid%p_top) + grid%p_top
350 grid%em_t_init(i,k,j) = (t00 + A*LOG(grid%em_pb(i,k,j)/p00))*(p00/grid%em_pb(i,k,j))**(r_d/cp) - t0
351 grid%em_alb(i,k,j) = (r_d/p1000mb)*(grid%em_t_init(i,k,j)+t0)*(grid%em_pb(i,k,j)/p1000mb)**cvpm
352 END DO
353
354 ! Base state mu is defined as base state surface pressure minus grid%p_top
355
356 grid%em_mub(i,j) = p_surf - grid%p_top
357
358 ! Integrate base geopotential, starting at terrain elevation. This assures that
359 ! the base state is in exact hydrostatic balance with respect to the model equations.
360 ! This field is on full levels.
361
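! The sum below is the discrete form of the hydrostatic relation in eta coordinates,
! d(phb)/d(eta) = -mub*alb, accumulated from phb = g*ht at the surface upward
! (em_dnw holds the signed layer thickness in eta).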
362 grid%em_phb(i,1,j) = grid%ht(i,j) * g
363 DO k = 2,kte
364 grid%em_phb(i,k,j) = grid%em_phb(i,k-1,j) - grid%em_dnw(k-1)*grid%em_mub(i,j)*grid%em_alb(i,k-1,j)
365 END DO
366 END DO
367 END DO
368
369 ENDIF
370
371 IF(.not.config_flags%restart)THEN
372
373 ! If this is a nested domain, the fields that were defined/interpolated from the parent are the _2 arrays.
374
375 IF ( first_trip_for_this_domain ) THEN
376
377 ! data that is expected to be zero must be explicitly initialized as such
378 grid%h_diabatic = 0.
379
380 DO j = jts,min(jte,jde-1)
381 DO k = kts,kte-1
382 DO i = its, min(ite,ide-1)
383 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
384 grid%em_t_1(i,k,j)=grid%em_t_2(i,k,j)
385 ENDIF
386 ENDDO
387 ENDDO
388 ENDDO
389
390 DO j = jts,min(jte,jde-1)
391 DO i = its, min(ite,ide-1)
392 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
393 grid%em_mu_1(i,j)=grid%em_mu_2(i,j)
394 ENDIF
395 ENDDO
396 ENDDO
397 END IF
398
399 ! reconstitute base-state fields
400
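! Two strategies are used below: for a single domain, the base-state temperature read
! from wrfinput is trusted and pb/alb are rebuilt from it; for nests, t_init is not an
! interpolated field, so it is instead re-derived from the interpolated phb and mub,
! keeping the fine-grid base state hydrostatically consistent.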
401 IF(config_flags%max_dom .EQ. 1)THEN
402 ! with single domain, grid%em_t_init from wrfinput is OK to use
403 DO j = jts,min(jte,jde-1)
404 DO k = kts,kte-1
405 DO i = its, min(ite,ide-1)
406 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
407 grid%em_pb(i,k,j) = grid%em_znu(k)*grid%em_mub(i,j)+grid%p_top
408 grid%em_alb(i,k,j) = (r_d/p1000mb)*(grid%em_t_init(i,k,j)+t0)*(grid%em_pb(i,k,j)/p1000mb)**cvpm
409 ENDIF
410 ENDDO
411 ENDDO
412 ENDDO
413 ELSE
414 ! with nests, grid%em_t_init generally needs to be recomputed (since it is not an interpolated field)
415 DO j = jts,min(jte,jde-1)
416 DO k = kts,kte-1
417 DO i = its, min(ite,ide-1)
418 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
419 grid%em_pb(i,k,j) = grid%em_znu(k)*grid%em_mub(i,j)+grid%p_top
420 grid%em_alb(i,k,j) = -grid%em_rdnw(k)*(grid%em_phb(i,k+1,j)-grid%em_phb(i,k,j))/grid%em_mub(i,j)
421 grid%em_t_init(i,k,j) = grid%em_alb(i,k,j)*(p1000mb/r_d)/((grid%em_pb(i,k,j)/p1000mb)**cvpm) - t0
422 ENDIF
423 ENDDO
424 ENDDO
425 ENDDO
426 ENDIF
427
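! Outline of the recomputation below: starting at the top model layer (k = kte-1),
! the perturbation pressure is diagnosed hydrostatically from the moisture-coupled
! column mass and then integrated downward one layer at a time. The full inverse
! density alt follows from the equation of state written in terms of potential
! temperature (cvpm = -cv/cp), and al = alt - alb is its perturbation from the base
! state. At the top layer the layer-mean qv uses the same level twice, since there is
! no vapor value above it.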
428 DO j = jts,min(jte,jde-1)
429
430 k = kte-1
431 DO i = its, min(ite,ide-1)
432 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
433 qvf1 = 0.5*(moist(i,k,j,P_QV)+moist(i,k,j,P_QV))
434 qvf2 = 1./(1.+qvf1)
435 qvf1 = qvf1*qvf2
436 grid%em_p(i,k,j) = - 0.5*(grid%em_mu_1(i,j)+qvf1*grid%em_mub(i,j))/grid%em_rdnw(k)/qvf2
437 qvf = 1. + rvovrd*moist(i,k,j,P_QV)
438 grid%em_alt(i,k,j) = (r_d/p1000mb)*(grid%em_t_1(i,k,j)+t0)*qvf*(((grid%em_p(i,k,j)+grid%em_pb(i,k,j))/p1000mb)**cvpm)
439 grid%em_al(i,k,j) = grid%em_alt(i,k,j) - grid%em_alb(i,k,j)
440 ENDIF
441 ENDDO
442
443 DO k = kte-2, 1, -1
444 DO i = its, min(ite,ide-1)
445 IF ( grid%imask_nostag(i,j) .EQ. 1 ) THEN
446 qvf1 = 0.5*(moist(i,k,j,P_QV)+moist(i,k+1,j,P_QV))
447 qvf2 = 1./(1.+qvf1)
448 qvf1 = qvf1*qvf2
449 grid%em_p(i,k,j) = grid%em_p(i,k+1,j) - (grid%em_mu_1(i,j) + qvf1*grid%em_mub(i,j))/qvf2/grid%em_rdn(k+1)
450 qvf = 1. + rvovrd*moist(i,k,j,P_QV)
451 grid%em_alt(i,k,j) = (r_d/p1000mb)*(grid%em_t_1(i,k,j)+t0)*qvf* &
452 (((grid%em_p(i,k,j)+grid%em_pb(i,k,j))/p1000mb)**cvpm)
453 grid%em_al(i,k,j) = grid%em_alt(i,k,j) - grid%em_alb(i,k,j)
454 ENDIF
455 ENDDO
456 ENDDO
457
458 ENDDO
459
460 ENDIF
461
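! For a nest initialized from its own input file, the two terrain fields carried here
! (ht and ht_fine) can differ. The loop below applies a first-order hydrostatic
! correction to the column mass for the height difference g*(ht - ht_fine) before
! copying mu_2 into mu_1.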
462 IF ( ( grid%id .NE. 1 ) .AND. .NOT. ( config_flags%restart ) .AND. &
463 ( ( config_flags%input_from_hires ) .OR. ( config_flags%input_from_file ) ) ) THEN
464 DO j = jts, MIN(jte,jde-1)
465 DO i = its, MIN(ite,ide-1)
466 grid%em_mu_2(i,j) = grid%em_mu_2(i,j) + grid%em_al(i,1,j) / ( grid%em_alt(i,1,j) * grid%em_alb(i,1,j) ) * &
467 g * ( grid%ht(i,j) - grid%ht_fine(i,j) )
468 END DO
469 END DO
470 DO j = jts,min(jte,jde-1)
471 DO i = its, min(ite,ide-1)
472 grid%em_mu_1(i,j)=grid%em_mu_2(i,j)
473 ENDDO
474 ENDDO
475
476 END IF
477
478 IF ( first_trip_for_this_domain ) THEN
479
480 CALL wrf_debug ( 100 , 'module_start: start_domain_rk: Before call to phy_init' )
481
482 ! namelist MPDT does not exist yet, so set it here
483 ! MPDT is the call frequency for microphysics in minutes (0 means every step)
484 MPDT = 0.
485
486 ! Set GMT outside of phy_init because phy_init may not be called on this process
487 ! (for example, for a moving nest when this part of the domain is not being
488 ! initialized, i.e. it is not on the leading edge).
489 CALL domain_setgmtetc( grid, start_of_simulation )
490
491 CALL set_tiles ( grid , grid%imask_nostag, ims, ime, jms, jme, ips, ipe, jps, jpe )
492
493 ! Phy_init is not necessarily thread-safe; do not multi-thread this loop.
494 ! The tiling is to handle the fact that we may be masking off part of the computation.
495 DO ij = 1, grid%num_tiles
496
497 CALL phy_init ( grid%id , config_flags, grid%DT, grid%RESTART, grid%em_znw, grid%em_znu, &
498 grid%p_top, grid%tsk, grid%RADT,grid%BLDT,grid%CUDT, MPDT, &
499 grid%rthcuten, grid%rqvcuten, grid%rqrcuten, &
500 grid%rqccuten, grid%rqscuten, grid%rqicuten, &
501 grid%rublten,grid%rvblten,grid%rthblten, &
502 grid%rqvblten,grid%rqcblten,grid%rqiblten, &
503 grid%rthraten,grid%rthratenlw,grid%rthratensw, &
504 grid%stepbl,grid%stepra,grid%stepcu, &
505 grid%w0avg, grid%rainnc, grid%rainc, grid%raincv, grid%rainncv, &
506 grid%nca,grid%swrad_scat, &
507 grid%cldefi,grid%lowlyr, &
508 grid%mass_flux, &
509 grid%rthften, grid%rqvften, &
510 grid%cldfra, &
511 #ifdef WRF_CHEM
512 grid%cldfra_old, &
513 #endif
514 #ifndef WRF_CHEM
515 cldfra_old, &
516 #endif
517 grid%glw,grid%gsw,grid%emiss,grid%lu_index, &
518 grid%landuse_ISICE, grid%landuse_LUCATS, &
519 grid%landuse_LUSEAS, grid%landuse_ISN, &
520 grid%lu_state, &
521 grid%xlat,grid%xlong,grid%albedo,grid%albbck,grid%GMT,grid%JULYR,grid%JULDAY, &
522 grid%levsiz, num_ozmixm, num_aerosolc, grid%paerlev, &
523 grid%tmn,grid%xland,grid%znt,grid%z0,grid%ust,grid%mol,grid%pblh,grid%tke_myj, &
524 grid%exch_h,grid%thc,grid%snowc,grid%mavail,grid%hfx,grid%qfx,grid%rainbl, &
525 grid%tslb,grid%zs,grid%dzs,config_flags%num_soil_layers,grid%warm_rain, &
526 grid%adv_moist_cond, &
527 grid%apr_gr,grid%apr_w,grid%apr_mc,grid%apr_st,grid%apr_as, &
528 grid%apr_capma,grid%apr_capme,grid%apr_capmi, &
529 grid%xice,grid%vegfra,grid%snow,grid%canwat,grid%smstav, &
530 grid%smstot, grid%sfcrunoff,grid%udrunoff,grid%grdflx,grid%acsnow, &
531 grid%acsnom,grid%ivgtyp,grid%isltyp, grid%sfcevp,grid%smois, &
532 grid%sh2o, grid%snowh, grid%smfr3d, &
533 grid%DX,grid%DY,grid%f_ice_phy,grid%f_rain_phy,grid%f_rimef_phy, &
534 grid%mp_restart_state,grid%tbpvs_state,grid%tbpvs0_state,&
535 allowed_to_read, grid%moved, start_of_simulation, &
536 ids, ide, jds, jde, kds, kde, &
537 ims, ime, jms, jme, kms, kme, &
538 grid%i_start(ij), grid%i_end(ij), grid%j_start(ij), grid%j_end(ij), kts, kte, &
539 ozmixm,grid%pin, & ! Optional
540 grid%m_ps_1,grid%m_ps_2,grid%m_hybi,aerosolc_1,aerosolc_2,& ! Optional
541 grid%rundgdten,grid%rvndgdten,grid%rthndgdten, & ! Optional
542 grid%rqvndgdten,grid%rmundgdten, & ! Optional
543 grid%FGDT,grid%stepfg, & ! Optional
544 grid%DZR, grid%DZB, grid%DZG, & !Optional urban
545 grid%TR_URB2D,grid%TB_URB2D,grid%TG_URB2D,grid%TC_URB2D, & !Optional urban
546 grid%QC_URB2D, grid%XXXR_URB2D,grid%XXXB_URB2D, & !Optional urban
547 grid%XXXG_URB2D, grid%XXXC_URB2D, & !Optional urban
548 grid%TRL_URB3D, grid%TBL_URB3D, grid%TGL_URB3D, & !Optional urban
549 grid%SH_URB2D, grid%LH_URB2D, grid%G_URB2D, grid%RN_URB2D, & !Optional urban
550 grid%TS_URB2D, grid%FRC_URB2D, grid%UTYPE_URB2D, & !Optional urban
551 itimestep=grid%itimestep, fdob=grid%fdob &
552 )
553
554 ENDDO
555
556
557
558 CALL wrf_debug ( 100 , 'module_start: start_domain_rk: After call to phy_init' )
559
560 #ifdef MCELIO
561 LU_MASK = 0.
562 WHERE ( grid%lu_index .EQ. 16 ) LU_MASK = 1.
563 #endif
564
565 END IF
566
567 #if 0
568 #include "CYCLE_TEST.inc"
569 #endif
570
571 !
572 !
573
574 ! set physical boundary conditions for all initialized variables
575
576 !-----------------------------------------------------------------------
577 ! Stencils for patch communications (WCS, 29 June 2001)
578 ! Note: the size of this halo exchange reflects the
579 ! fact that we are carrying the uncoupled variables
580 ! as state variables in the mass coordinate model, as
581 ! opposed to the coupled variables as in the height
582 ! coordinate model.
583 !
584 ! * * * * *
585 ! * * * * * * * * *
586 ! * + * * + * * * + * *
587 ! * * * * * * * * *
588 ! * * * * *
589 !
590 !j grid%em_u_1 x
591 !j grid%em_u_2 x
592 !j grid%em_v_1 x
593 !j grid%em_v_2 x
594 !j grid%em_w_1 x
595 !j grid%em_w_2 x
596 !j grid%em_t_1 x
597 !j grid%em_t_2 x
598 !j grid%em_ph_1 x
599 !j grid%em_ph_2 x
600 !
601 !j grid%em_t_init x
602 !
603 !j grid%em_phb x
604 !j grid%em_ph0 x
605 !j grid%em_php x
606 !j grid%em_pb x
607 !j grid%em_al x
608 !j grid%em_alt x
609 !j grid%em_alb x
610 !
611 ! the following are 2D (xy) variables
612 !
613 !j grid%em_mu_1 x
614 !j grid%em_mu_2 x
615 !j grid%em_mub x
616 !j grid%em_mu0 x
617 !j grid%ht x
618 !j grid%msft x
619 !j grid%msfu x
620 !j grid%msfv x
621 !j grid%sina x
622 !j grid%cosa x
623 !j grid%e x
624 !j grid%f x
625 !
626 ! 4D variables
627 !
628 ! moist x
629 ! chem x
630 !scalar x
631
632 !--------------------------------------------------------------
633
634 #ifdef DM_PARALLEL
635 # include "HALO_EM_INIT_1.inc"
636 # include "HALO_EM_INIT_2.inc"
637 # include "HALO_EM_INIT_3.inc"
638 # include "HALO_EM_INIT_4.inc"
639 # include "HALO_EM_INIT_5.inc"
640 # include "PERIOD_BDY_EM_INIT.inc"
641 # include "PERIOD_BDY_EM_MOIST.inc"
642 # include "PERIOD_BDY_EM_CHEM.inc"
643 #endif
644
645
646 CALL set_physical_bc3d( grid%em_u_1 , 'U' , config_flags , &
647 ids , ide , jds , jde , kds , kde , &
648 ims , ime , jms , jme , kms , kme , &
649 its , ite , jts , jte , kts , kte , &
650 its , ite , jts , jte , kts , kte )
651 CALL set_physical_bc3d( grid%em_u_2 , 'U' , config_flags , &
652 ids , ide , jds , jde , kds , kde , &
653 ims , ime , jms , jme , kms , kme , &
654 its , ite , jts , jte , kts , kte , &
655 its , ite , jts , jte , kts , kte )
656
657 CALL set_physical_bc3d( grid%em_v_1 , 'V' , config_flags , &
658 ids , ide , jds , jde , kds , kde , &
659 ims , ime , jms , jme , kms , kme , &
660 its , ite , jts , jte , kts , kte , &
661 its , ite , jts , jte , kts , kte )
662 CALL set_physical_bc3d( grid%em_v_2 , 'V' , config_flags , &
663 ids , ide , jds , jde , kds , kde , &
664 ims , ime , jms , jme , kms , kme , &
665 its , ite , jts , jte , kts , kte , &
666 its , ite , jts , jte , kts , kte )
667
668 ! set kinematic condition for w
669
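! The surface w must be consistent with flow over the terrain, i.e. the kinematic
! lower boundary condition w = u*dh/dx + v*dh/dy at the lowest level; set_w_surface
! (called below) applies this using the map factors and the cf1/cf2/cf3 extrapolation
! coefficients. It is skipped on restart, since w then comes from the restart file.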
670 CALL set_physical_bc2d( grid%ht , 'r' , config_flags , &
671 ids , ide , jds , jde , &
672 ims , ime , jms , jme , &
673 its , ite , jts , jte , &
674 its , ite , jts , jte )
675
676 IF ( .not. config_flags%restart ) THEN
677 CALL set_w_surface( config_flags, &
678 grid%em_w_1, grid%ht, grid%em_u_1, grid%em_v_1, grid%cf1, &
679 grid%cf2, grid%cf3, grid%rdx, grid%rdy, grid%msft, &
680 ids, ide, jds, jde, kds, kde, &
681 ips, ipe, jps, jpe, kps, kpe, &
682 its, ite, jts, jte, kts, kte, &
683 ims, ime, jms, jme, kms, kme )
684 CALL set_w_surface( config_flags, &
685 grid%em_w_2, grid%ht, grid%em_u_2, grid%em_v_2, grid%cf1, &
686 grid%cf2, grid%cf3, grid%rdx, grid%rdy, grid%msft, &
687 ids, ide, jds, jde, kds, kde, &
688 ips, ipe, jps, jpe, kps, kpe, &
689 its, ite, jts, jte, kts, kte, &
690 ims, ime, jms, jme, kms, kme )
691 END IF
692
693 ! finished setting kinematic condition for w at the surface
694
695 CALL set_physical_bc3d( grid%em_w_1 , 'W' , config_flags , &
696 ids , ide , jds , jde , kds , kde , &
697 ims , ime , jms , jme , kms , kme , &
698 its , ite , jts , jte , kts , kte , &
699 its , ite , jts , jte , kts , kte )
700 CALL set_physical_bc3d( grid%em_w_2 , 'W' , config_flags , &
701 ids , ide , jds , jde , kds , kde , &
702 ims , ime , jms , jme , kms , kme , &
703 its , ite , jts , jte , kts , kte , &
704 its , ite , jts , jte , kts , kte )
705
706 CALL set_physical_bc3d( grid%em_ph_1 , 'W' , config_flags , &
707 ids , ide , jds , jde , kds , kde , &
708 ims , ime , jms , jme , kms , kme , &
709 its , ite , jts , jte , kts , kte , &
710 its , ite , jts , jte , kts , kte )
711
712 CALL set_physical_bc3d( grid%em_ph_2 , 'W' , config_flags , &
713 ids , ide , jds , jde , kds , kde , &
714 ims , ime , jms , jme , kms , kme , &
715 its , ite , jts , jte , kts , kte , &
716 its , ite , jts , jte , kts , kte )
717
718 CALL set_physical_bc3d( grid%em_t_1 , 't' , config_flags , &
719 ids , ide , jds , jde , kds , kde , &
720 ims , ime , jms , jme , kms , kme , &
721 its , ite , jts , jte , kts , kte , &
722 its , ite , jts , jte , kts , kte )
723
724 CALL set_physical_bc3d( grid%em_t_2 , 't' , config_flags , &
725 ids , ide , jds , jde , kds , kde , &
726 ims , ime , jms , jme , kms , kme , &
727 its , ite , jts , jte , kts , kte , &
728 its , ite , jts , jte , kts , kte )
729
730 CALL set_physical_bc2d( grid%em_mu_1, 't' , config_flags , &
731 ids , ide , jds , jde , &
732 ims , ime , jms , jme , &
733 its , ite , jts , jte , &
734 its , ite , jts , jte )
735 CALL set_physical_bc2d( grid%em_mu_2, 't' , config_flags , &
736 ids , ide , jds , jde , &
737 ims , ime , jms , jme , &
738 its , ite , jts , jte , &
739 its , ite , jts , jte )
740 CALL set_physical_bc2d( grid%em_mub , 't' , config_flags , &
741 ids , ide , jds , jde , &
742 ims , ime , jms , jme , &
743 its , ite , jts , jte , &
744 its , ite , jts , jte )
745 CALL set_physical_bc2d( grid%em_mu0 , 't' , config_flags , &
746 ids , ide , jds , jde , &
747 ims , ime , jms , jme , &
748 its , ite , jts , jte , &
749 its , ite , jts , jte )
750
751
752 CALL set_physical_bc3d( grid%em_phb , 'W' , config_flags , &
753 ids , ide , jds , jde , kds , kde , &
754 ims , ime , jms , jme , kms , kme , &
755 its , ite , jts , jte , kts , kte , &
756 its , ite , jts , jte , kts , kte )
757 CALL set_physical_bc3d( grid%em_ph0 , 'W' , config_flags , &
758 ids , ide , jds , jde , kds , kde , &
759 ims , ime , jms , jme , kms , kme , &
760 its , ite , jts , jte , kts , kte , &
761 its , ite , jts , jte , kts , kte )
762 CALL set_physical_bc3d( grid%em_php , 'W' , config_flags , &
763 ids , ide , jds , jde , kds , kde , &
764 ims , ime , jms , jme , kms , kme , &
765 its , ite , jts , jte , kts , kte , &
766 its , ite , jts , jte , kts , kte )
767
768 CALL set_physical_bc3d( grid%em_pb , 't' , config_flags , &
769 ids , ide , jds , jde , kds , kde , &
770 ims , ime , jms , jme , kms , kme , &
771 its , ite , jts , jte , kts , kte , &
772 its , ite , jts , jte , kts , kte )
773 CALL set_physical_bc3d( grid%em_al , 't' , config_flags , &
774 ids , ide , jds , jde , kds , kde , &
775 ims , ime , jms , jme , kms , kme , &
776 its , ite , jts , jte , kts , kte , &
777 its , ite , jts , jte , kts , kte )
778 CALL set_physical_bc3d( grid%em_alt , 't' , config_flags , &
779 ids , ide , jds , jde , kds , kde , &
780 ims , ime , jms , jme , kms , kme , &
781 its , ite , jts , jte , kts , kte , &
782 its , ite , jts , jte , kts , kte )
783 CALL set_physical_bc3d( grid%em_alb , 't' , config_flags , &
784 ids , ide , jds , jde , kds , kde , &
785 ims , ime , jms , jme , kms , kme , &
786 its , ite , jts , jte , kts , kte , &
787 its , ite , jts , jte , kts , kte )
788 CALL set_physical_bc3d(grid%em_t_init, 't' , config_flags , &
789 ids , ide , jds , jde , kds , kde , &
790 ims , ime , jms , jme , kms , kme , &
791 its , ite , jts , jte , kts , kte , &
792 its , ite , jts , jte , kts , kte )
793
794 IF (num_moist > 0) THEN
795
796 ! use of (:,:,:,loop) not efficient on DEC, but (ims,kms,jms,loop) not portable to SGI/Cray
797
798 loop_3d_m : DO loop = 1 , num_moist
799 CALL set_physical_bc3d( moist(:,:,:,loop) , 'r' , config_flags , &
800 ids , ide , jds , jde , kds , kde , &
801 ims , ime , jms , jme , kms , kme , &
802 its , ite , jts , jte , kts , kte , &
803 its , ite , jts , jte , kts , kte )
804 END DO loop_3d_m
805
806 ENDIF
807
808 !wig 17-Oct-2006, begin: I think the following should be here...
809 IF (num_scalar > 0) THEN
810
811 ! use of (:,:,:,loop) not efficient on DEC, but (ims,kms,jms,loop) not portable to SGI/Cray
812
813 loop_3d_s : DO loop = 1 , num_scalar
814 CALL set_physical_bc3d( scalar(:,:,:,loop) , 'r' , config_flags , &
815 ids , ide , jds , jde , kds , kde , &
816 ims , ime , jms , jme , kms , kme , &
817 its , ite , jts , jte , kts , kte , &
818 its , ite , jts , jte , kts , kte )
819 END DO loop_3d_s
820
821 ENDIF
822 !wig end.
823
824
825 #ifdef WRF_CHEM
826 !
827 ! We do this here so that we only need one chem_init routine for either core.
828 !
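! Preparation for chem_init: z_at_w is the height of the w (full) levels, recovered
! from the total geopotential (ph_2 + phb)/g, and convfac is the molar density of air,
! p/(RGASUNIV*T) in mol/m3, with T diagnosed from the perturbation potential
! temperature; these are used when initializing the chemistry fields.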
829 do j=jts,min(jte,jde-1)
830 do i=its,min(ite,ide-1)
831 do k=kts,kte
832 z_at_w(i,k,j)=(grid%em_ph_2(i,k,j)+grid%em_phb(i,k,j))/g
833 enddo
834 do k=kts,min(kte,kde-1)
835 tempfac=(grid%em_t_1(i,k,j) + t0)*((grid%em_p(i,k,j) + grid%em_pb(i,k,j))/p1000mb)**rcp
836 convfac(i,k,j) = (grid%em_p(i,k,j)+grid%em_pb(i,k,j))/rgasuniv/tempfac
837 enddo
838 enddo
839 enddo
840
841 CALL chem_init (grid%id,chem,grid%dt,grid%bioemdt,grid%photdt, &
842 grid%chemdt, &
843 grid%stepbioe,grid%stepphot,grid%stepchem, &
844 z_at_w,g,grid%aerwrf,config_flags, &
845 grid%em_alt,grid%em_t_1,grid%em_p,convfac, &
846 grid%gd_cloud, grid%gd_cloud2, &
847 grid%gd_cloud_b, grid%gd_cloud2_b, &
848 grid%tauaer1,grid%tauaer2,grid%tauaer3,grid%tauaer4, &
849 grid%gaer1,grid%gaer2,grid%gaer3,grid%gaer4, &
850 grid%waer1,grid%waer2,grid%waer3,grid%waer4, &
851 grid%pm2_5_dry,grid%pm2_5_water,grid%pm2_5_dry_ec, &
852 grid%chem_in_opt, &
853 ids , ide , jds , jde , kds , kde , &
854 ims , ime , jms , jme , kms , kme , &
855 its , ite , jts , jte , kts , kte )
856
857 !
858 ! calculate initial pm
859 !
860 ! print *,'calculating initial pm'
861 select case (config_flags%chem_opt)
862 case (RADM2SORG, RACMSORG)
863 call sum_pm_sorgam ( &
864 grid%em_alt, chem, grid%h2oaj, grid%h2oai, &
865 grid%pm2_5_dry, grid%pm2_5_water, grid%pm2_5_dry_ec, grid%pm10, &
866 ids,ide, jds,jde, kds,kde, &
867 ims,ime, jms,jme, kms,kme, &
868 its,ite, jts,jte, kts,kte )
869
870 case (CBMZ_MOSAIC_4BIN,CBMZ_MOSAIC_8BIN,CBMZ_MOSAIC_4BIN_AQ,CBMZ_MOSAIC_8BIN_AQ)
871 call sum_pm_mosaic ( &
872 grid%em_alt, chem, &
873 grid%pm2_5_dry, grid%pm2_5_water, grid%pm2_5_dry_ec, grid%pm10, &
874 ids,ide, jds,jde, kds,kde, &
875 ims,ime, jms,jme, kms,kme, &
876 its,ite, jts,jte, kts,kte )
877
878 case default
879 do j=jts,min(jte,jde-1)
880 do k=kts,min(kte,kde-1)
881 do i=its,min(ite,ide-1)
882 grid%pm2_5_dry(i,k,j) = 0.
883 grid%pm2_5_water(i,k,j) = 0.
884 grid%pm2_5_dry_ec(i,k,j) = 0.
885 grid%pm10(i,k,j) = 0.
886 enddo
887 enddo
888 enddo
889 end select
890 #endif
891
892 IF (num_chem >= PARAM_FIRST_SCALAR ) THEN
893 ! use of (:,:,:,loop) not efficient on DEC, but (ims,kms,jms,loop) not portable to SGI/Cray
894
895 loop_3d_c : DO loop = PARAM_FIRST_SCALAR , num_chem
896 CALL set_physical_bc3d( chem(:,:,:,loop) , 'r' , config_flags , &
897 ids , ide , jds , jde , kds , kde , &
898 ims , ime , jms , jme , kms , kme , &
899 its , ite , jts , jte , kts , kte , &
900 its , ite , jts , jte , kts , kte )
901 END DO loop_3d_c
902
903 ENDIF
904
905 CALL set_physical_bc2d( grid%msft , 'r' , config_flags , &
906 ids , ide , jds , jde , &
907 ims , ime , jms , jme , &
908 its , ite , jts , jte , &
909 its , ite , jts , jte )
910 CALL set_physical_bc2d( grid%msfu , 'x' , config_flags , &
911 ids , ide , jds , jde , &
912 ims , ime , jms , jme , &
913 its , ite , jts , jte , &
914 its , ite , jts , jte )
915 CALL set_physical_bc2d( grid%msfv , 'y' , config_flags , &
916 ids , ide , jds , jde , &
917 ims , ime , jms , jme , &
918 its , ite , jts , jte , &
919 its , ite , jts , jte )
920 CALL set_physical_bc2d( grid%sina , 'r' , config_flags , &
921 ids , ide , jds , jde , &
922 ims , ime , jms , jme , &
923 its , ite , jts , jte , &
924 its , ite , jts , jte )
925 CALL set_physical_bc2d( grid%cosa , 'r' , config_flags , &
926 ids , ide , jds , jde , &
927 ims , ime , jms , jme , &
928 its , ite , jts , jte , &
929 its , ite , jts , jte )
930 CALL set_physical_bc2d( grid%e , 'r' , config_flags , &
931 ids , ide , jds , jde , &
932 ims , ime , jms , jme , &
933 its , ite , jts , jte , &
934 its , ite , jts , jte )
935 CALL set_physical_bc2d( grid%f , 'r' , config_flags , &
936 ids , ide , jds , jde , &
937 ims , ime , jms , jme , &
938 its , ite , jts , jte , &
939 its , ite , jts , jte )
940
941 #ifndef WRF_CHEM
942 DEALLOCATE(CLDFRA_OLD)
943 #endif
944 #ifdef DM_PARALLEL
945 # include "HALO_EM_INIT_1.inc"
946 # include "HALO_EM_INIT_2.inc"
947 # include "HALO_EM_INIT_3.inc"
948 # include "HALO_EM_INIT_4.inc"
949 # include "HALO_EM_INIT_5.inc"
950 # include "PERIOD_BDY_EM_INIT.inc"
951 # include "PERIOD_BDY_EM_MOIST.inc"
952 # include "PERIOD_BDY_EM_CHEM.inc"
953 #endif
954
955 CALL wrf_debug ( 100 , 'module_start: start_domain_rk: Returning' )
956
957 RETURN
958
959 END SUBROUTINE start_domain_em
960