module_wrf_esmf_super.F

!WRF:DRIVER_LAYER:ESMF
!

MODULE module_wrf_esmf_super
!<DESCRIPTION>
! This module defines wrf_init(), wrf_run(), and wrf_finalize() routines for
! use by the ESMF superstructure.
!</DESCRIPTION>
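!
! As a usage sketch, a driver could exercise these entry points directly
! along the following lines (hypothetical program, not part of WRF; a full
! ESMF application would instead register the routines with a gridded
! component and drive them through ESMF_GridCompInitialize/Run/Finalize):
!
!   PROGRAM wrf_esmf_driver_sketch
!     USE module_wrf_esmf_super
!     IMPLICIT NONE
!     TYPE(ESMF_GridComp) :: gcomp
!     TYPE(ESMF_State)    :: importState, exportState
!     TYPE(ESMF_Clock)    :: clock
!     INTEGER             :: rc
!     CALL wrf_init( gcomp, importState, exportState, clock, rc )
!     IF ( rc == ESMF_SUCCESS ) CALL wrf_run( gcomp, importState, exportState, clock, rc )
!     IF ( rc == ESMF_SUCCESS ) CALL wrf_finalize( gcomp, importState, exportState, clock, rc )
!   END PROGRAM wrf_esmf_driver_sketch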

   USE module_machine
   USE module_domain
   USE module_integrate
#ifdef WRF_PLUS
   USE module_integrate_ad
   USE module_integrate_tl
   USE module_integrate_tst
#endif
   USE module_driver_constants
   USE module_configure

   USE module_timing
   USE module_wrf_error

   USE ESMF_BaseMod
   USE ESMF_Stubs

#ifdef DM_PARALLEL
   USE module_dm
#endif

   USE ESMF_ClockMod

   IMPLICIT NONE

   REAL    :: time

   INTEGER :: loop , &
              levels_to_process

   TYPE (domain) , POINTER :: keep_grid, grid_ptr, null_domain
   TYPE (grid_config_rec_type), SAVE :: config_flags
   INTEGER                 :: number_at_same_level
   INTEGER                 :: time_step_begin_restart

   INTEGER :: max_dom , domain_id , fid , oid , idum1 , idum2 , ierr
   INTEGER :: debug_level
   LOGICAL :: input_from_file

#ifdef DM_PARALLEL
   INTEGER                 :: nbytes
   INTEGER, PARAMETER      :: configbuflen = 4* CONFIG_BUF_LEN
   INTEGER                 :: configbuf( configbuflen )
   LOGICAL , EXTERNAL      :: wrf_dm_on_monitor
#endif

   CHARACTER (LEN=80)      :: rstname
   CHARACTER (LEN=80)      :: message

   INTERFACE
     SUBROUTINE Setup_Timekeeping( grid )
      USE module_domain
      TYPE(domain), POINTER :: grid
     END SUBROUTINE Setup_Timekeeping
   END INTERFACE


CONTAINS


   SUBROUTINE wrf_init( gcomp, importState, exportState, clock, rc )
     TYPE(ESMF_GridComp), INTENT(INOUT) :: gcomp
     TYPE(ESMF_State), INTENT(INOUT) :: importState, exportState
     TYPE(ESMF_Clock), INTENT(INOUT) :: clock
     INTEGER, INTENT(OUT) :: rc
!<DESCRIPTION>
!     WRF init routine.
!
!     The arguments are:
!       gcomp           Component
!       importState     Import state
!       exportState     Export state
!       clock           External clock
!       rc              Return code; equals ESMF_SUCCESS if there are no errors,
!                       otherwise ESMF_FAILURE.
!</DESCRIPTION>

!<DESCRIPTION>
! Program_name, a global variable defined in frame/module_domain.F, is
! set, then the routine <a href=init_modules.html>init_modules</a> is
! called. This calls all of the initialization routines provided by the
! modules linked into WRF, including initialization of external I/O
! packages.  Some key initializations for distributed-memory parallelism
! also occur here if DM_PARALLEL is specified in the compile: setting up
! I/O quilt processes to act as I/O servers, dividing up MPI
! communicators among them, and initializing external communication
! packages such as RSL or RSL_LITE.
!
!</DESCRIPTION>

   program_name = "WRF V2.0.3.1 MODEL"

   !  Get the NAMELIST data for input.

   CALL init_modules(2)   ! Phase 2 resumes after MPI_INIT() (if it is called)
                          ! Phase 1 is called before ESMF starts up

!<DESCRIPTION>
! The WRF namelist.input file is read and stored in the USE-associated
! structure model_config_rec, defined in frame/module_configure.F, by the
! call to <a href=initial_config.html>initial_config</a>.  On
! distributed-memory parallel runs this is done only on one processor and
! then broadcast: the configuration information is first packed into a
! buffer (<a href=get_config_as_buffer.html>get_config_as_buffer</a>),
! the buffer is broadcast, and the configuration information is then
! unpacked on each task (<a
! href=set_config_as_buffer.html>set_config_as_buffer</a>).
!
!</DESCRIPTION>

#ifdef DM_PARALLEL
   IF ( wrf_dm_on_monitor() ) THEN
     CALL initial_config
   ENDIF
   CALL get_config_as_buffer( configbuf, configbuflen, nbytes )
   CALL wrf_dm_bcast_bytes( configbuf, nbytes )
   CALL set_config_as_buffer( configbuf, configbuflen )
   CALL wrf_dm_initialize
#else
   CALL initial_config
#endif

!<DESCRIPTION>
! Among the configuration variables read from the namelist is
! debug_level. This is retrieved using nl_get_debug_level (Registry
! generated and defined in frame/module_configure.F).  The value is then
! used to set the debug-print information level for use by <a
! href=wrf_debug.html>wrf_debug</a> throughout the code. A debug_level
! of zero (the default) causes no information to be printed when the
! model runs. The higher the number (up to 1000), the more information
! is printed.
!
!</DESCRIPTION>
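
! For illustration, verbose diagnostic prints could be requested with a
! namelist.input fragment like the following (a sketch; other
! &time_control entries are omitted, and 100 is just an example value):
!
!   &time_control
!     debug_level = 100
!   /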

   CALL nl_get_debug_level ( 1, debug_level )
   CALL set_wrf_debug_level ( debug_level )

   ! Allocate and configure the mother domain.

   NULLIFY( null_domain )

!<DESCRIPTION>
! RSL is required for the WRF nesting options.
! The non-MPI build that allows nesting is supported only on machines
! built with the -DSTUBMPI option.  Check whether the WRF model is being
! asked for a multi-domain run (max_dom > 1, from the namelist).  If so,
! make sure that we are running under the parallel run option or are on
! an acceptable machine.
!</DESCRIPTION>
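
!  For reference, a two-domain (nested) run would be requested with a
!  namelist.input fragment along these lines (a sketch; only the
!  nesting-related entry of &domains is shown):
!
!    &domains
!      max_dom = 2
!    /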

   CALL nl_get_max_dom( 1, max_dom )
   IF ( max_dom > 1 ) THEN
#if ( ! defined(DM_PARALLEL)  &&   ! defined(STUBMPI) )
   CALL wrf_error_fatal( &
     'nesting requires either an MPI build or use of the -DSTUBMPI option' )
#endif
#if ( defined ( MOVE_NESTS ) )
   CALL wrf_message( '                      W A R N I N G                          ' )
   CALL wrf_message( '  MOVING NEST CAPABILITY IS EXPERIMENTAL AND UNSUPPORTED     ' )
   CALL wrf_message( '                 IN THIS VERSION OF WRF                      ' )
   CALL wrf_message( '          U S E   A T   Y O U R   O W N   R I S K            ' )
#endif
   END IF

!<DESCRIPTION>
! The top-most domain in the simulation is then allocated and configured
! by calling <a href=alloc_and_configure_domain.html>alloc_and_configure_domain</a>.
! Here, in the case of this root domain, the routine is passed the
! globally accessible pointer to TYPE(domain), head_grid, defined in
! frame/module_domain.F.  The parent is null and the child index is given
! as negative, signifying none.  Afterwards, because the call to
! alloc_and_configure_domain may modify the model's configuration data
! stored in model_config_rec, the configuration information is again
! repacked into a buffer, broadcast, and unpacked on each task (for
! DM_PARALLEL compiles). The call to <a
! href=setup_timekeeping.html>setup_timekeeping</a> for head_grid relies
! on this configuration information, and it must occur after the second
! broadcast of the configuration information.
!
!</DESCRIPTION>
   CALL wrf_message ( program_name )
   CALL wrf_debug ( 100 , 'wrf: calling alloc_and_configure_domain ' )
   CALL alloc_and_configure_domain ( domain_id  = 1 ,                  &
                                     grid       = head_grid ,          &
                                     parent     = null_domain ,        &
                                     kid        = -1                   )

   CALL wrf_debug ( 100 , 'wrf: calling model_to_grid_config_rec ' )
   CALL model_to_grid_config_rec ( head_grid%id , model_config_rec , config_flags )
   CALL wrf_debug ( 100 , 'wrf: calling set_scalar_indices_from_config ' )
   CALL set_scalar_indices_from_config ( head_grid%id , idum1, idum2 )
   CALL wrf_debug ( 100 , 'wrf: calling init_wrfio' )
   CALL init_wrfio

#ifdef DM_PARALLEL
   CALL get_config_as_buffer( configbuf, configbuflen, nbytes )
   CALL wrf_dm_bcast_bytes( configbuf, nbytes )
   CALL set_config_as_buffer( configbuf, configbuflen )
#endif

   CALL Setup_Timekeeping ( head_grid )

!<DESCRIPTION>
! The head grid is initialized with read-in data through the call to <a
! href=med_initialdata_input.html>med_initialdata_input</a>, which is
! passed the pointer head_grid and a locally declared configuration data
! structure, config_flags, that is set by a call to <a
! href=model_to_grid_config_rec.html>model_to_grid_config_rec</a>.  It is
! also necessary that the indices into the 4d tracer arrays such as
! moisture be set with a call to <a
! href=set_scalar_indices_from_config.html>set_scalar_indices_from_config</a>
! prior to the call to initialize the domain.  Both of these calls are
! told which domain they are setting up for by passing in the integer id
! of the head domain as <tt>head_grid%id</tt>, which is 1 for the
! top-most domain.
!
! In the case that write_restart_at_0h is set to true in the namelist,
! the model simply generates a restart file using the just read-in data
! and then shuts down. This is used for ensemble breeding, and is not
! typically enabled.
!
!</DESCRIPTION>
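
!  As a sketch, that restart-and-stop behavior would be requested with a
!  namelist.input fragment like the following (shown under &time_control;
!  the Registry is authoritative for the namelist group):
!
!    &time_control
!      write_restart_at_0h = .true.
!    /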

   CALL med_initialdata_input( head_grid , config_flags )

! TBH:  unscramble this later
! TBH:  ESMF will need to call wrf_finalize()
   IF ( config_flags%write_restart_at_0h ) THEN
      CALL med_restart_out ( head_grid, config_flags )
#ifndef AUTODOC_BUILD
! prevent this from showing up before the call to integrate in the autogenerated call tree
      CALL wrf_debug ( 0 , ' 0 h restart only wrf: SUCCESS COMPLETE WRF' )
      CALL wrf_finalize( gcomp, importState, exportState, clock, rc )
#endif
   END IF

!<DESCRIPTION>
! Once the top-level domain has been allocated, configured, and
! initialized, the model time integration is ready to proceed.  The start
! and stop times for the domain are set to the start and stop time of the
! model run, and then <a href=integrate.html>integrate</a> is called to
! advance the domain forward through that specified time interval.  On
! return, the simulation is completed.  A Mediation Layer-provided
! subroutine, <a href=med_shutdown_io.html>med_shutdown_io</a>, is called
! to allow the model to do any I/O-specific cleanup and shutdown, and
! then the WRF Driver Layer routine <a
! href=wrf_shutdown.html>wrf_shutdown</a> (quilt servers would be
! directed to shut down here) is called to properly end the run,
! including shutting down the communications (for example, most comm
! layers would call MPI_FINALIZE at this point if they're using MPI).
!
!</DESCRIPTION>


   !  The forecast integration for the coarsest grid is now started.  The
   !  integration runs from the first step (1) to the last step of the simulation.

   head_grid%start_subtime = domain_get_start_time(head_grid)
   head_grid%stop_subtime  = domain_get_stop_time(head_grid)

   ! return success status
   rc = ESMF_SUCCESS

   END SUBROUTINE wrf_init

   SUBROUTINE wrf_run( gcomp, importState, exportState, clock, rc )
     TYPE(ESMF_GridComp), INTENT(INOUT) :: gcomp
     TYPE(ESMF_State), INTENT(INOUT) :: importState, exportState
     TYPE(ESMF_Clock), INTENT(INOUT) :: clock
     INTEGER, INTENT(OUT) :: rc

     character(len=19), dimension(1000) :: nl_date_string
     integer                            :: nl_date_index
     character(len=19), dimension(1000) :: ad_date_string
     integer                            :: ad_date_index

!<DESCRIPTION>
!     WRF run routine.
!
!     The arguments are:
!       gcomp           Component
!       importState     Import state
!       exportState     Export state
!       clock           External clock
!       rc              Return code; equals ESMF_SUCCESS if there are no errors,
!                       otherwise ESMF_FAILURE.
!</DESCRIPTION>
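
!  The dispatch below keys on config_flags%dyn_opt, which comes from the
!  namelist; the DYN_* constants are defined in
!  frame/module_driver_constants.F.  A sketch of the corresponding
!  namelist.input fragment for the Eulerian mass (DYN_EM) core:
!
!    &dynamics
!      dyn_opt = 2
!    /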

   CALL wrf_debug ( 100 , 'wrf: calling integrate' )

   IF ( config_flags%dyn_opt == DYN_EM ) THEN
      CALL integrate ( head_grid )
#ifdef WRF_PLUS
   ELSE IF ( config_flags%dyn_opt == DYN_EM_SN ) THEN
      CALL integrate ( head_grid )
   ELSE IF ( config_flags%dyn_opt == DYN_EM_TL ) THEN

! - Look for boundary data after writing out history and restart files
      CALL med_latbound_in ( head_grid , config_flags )

      CALL Setup_date_string ( head_grid, nl_date_string, 'tn' )
      nl_date_index = 1

! JRB: workaround -- explicitly set the clock to the domain start time
      CALL ESMF_ClockSetCurrTime( head_grid%domain_clock, currTime=domain_get_start_time(head_grid), rc=rc )
!      CALL ESMF_ClockGetCurrTime( head_grid%domain_clock, currTime=domain_get_start_time(head_grid), rc=rc )

      CALL integrate_tl ( head_grid, nl_date_string, nl_date_index )

   ELSE IF ( config_flags%dyn_opt == DYN_EM_AD ) THEN

! - Look for boundary data after writing out history and restart files
      CALL med_latbound_in ( head_grid , config_flags )

      CALL Setup_date_string ( head_grid, ad_date_string, 'ad' )
      CALL Setup_date_string ( head_grid, nl_date_string, 'an' )
      nl_date_index = 1
      ad_date_index = 1

! JRB: workaround -- explicitly set the clock to the domain stop time
! (the adjoint integrates backward from the end of the run)
      CALL ESMF_ClockSetCurrTime( head_grid%domain_clock, currTime=domain_get_stop_time(head_grid), rc=rc )
!      CALL ESMF_ClockGetCurrTime( head_grid%domain_clock, currTime=domain_get_stop_time(head_grid), rc=rc )

      CALL integrate_ad ( head_grid, nl_date_string, nl_date_index, ad_date_string, ad_date_index )
   ELSE IF ( config_flags%dyn_opt == DYN_EM_TST ) THEN
      CALL integrate_tst ( head_grid )
#endif
   ENDIF

   CALL wrf_debug ( 100 , 'wrf: back from integrate' )

   ! return success status
   rc = ESMF_SUCCESS

   END SUBROUTINE wrf_run



   SUBROUTINE wrf_finalize( gcomp, importState, exportState, clock, rc )
     TYPE(ESMF_GridComp), INTENT(INOUT) :: gcomp
     TYPE(ESMF_State), INTENT(INOUT) :: importState, exportState
     TYPE(ESMF_Clock), INTENT(INOUT) :: clock
     INTEGER, INTENT(OUT) :: rc
!<DESCRIPTION>
!     WRF finalize routine.
!
!     The arguments are:
!       gcomp           Component
!       importState     Import state
!       exportState     Export state
!       clock           External clock
!       rc              Return code; equals ESMF_SUCCESS if there are no errors,
!                       otherwise ESMF_FAILURE.
!</DESCRIPTION>

   CALL med_shutdown_io ( head_grid , config_flags )
   CALL wrf_debug ( 100 , 'wrf: back from med_shutdown_io' )

   CALL wrf_debug (   0 , 'wrf: SUCCESS COMPLETE WRF' )
   CALL wrf_shutdown

   ! return success status
   rc = ESMF_SUCCESS

   END SUBROUTINE wrf_finalize


END MODULE module_wrf_esmf_super