source: palm/trunk/SOURCE/palm.f90 @ 2178

Last change on this file since 2178 was 2178, checked in by hellstea, 7 years ago

Nesting bugfixes

  • Property svn:keywords set to Id
File size: 16.0 KB
Line 
1!> @file palm.f90
2!------------------------------------------------------------------------------!
3! This file is part of PALM.
4!
5! PALM is free software: you can redistribute it and/or modify it under the
6! terms of the GNU General Public License as published by the Free Software
7! Foundation, either version 3 of the License, or (at your option) any later
8! version.
9!
10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
11! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
12! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
13!
14! You should have received a copy of the GNU General Public License along with
15! PALM. If not, see <http://www.gnu.org/licenses/>.
16!
17! Copyright 1997-2017 Leibniz Universitaet Hannover
18!------------------------------------------------------------------------------!
19!
20! Current revisions:
21! -----------------
22!
23!
24! Former revisions:
25! -----------------
26! $Id: palm.f90 2178 2017-03-17 11:07:39Z hellstea $
27!
28! 2118 2017-01-17 16:38:49Z raasch
29! OpenACC directives and related code removed
30!
31! 2011 2016-09-19 17:29:57Z kanani
32! Flag urban_surface is now defined in module control_parameters.
33!
34! 2007 2016-08-24 15:47:17Z kanani
35! Temporarily added CALL for writing of restart data for urban surface model
36!
37! 2000 2016-08-20 18:09:15Z knoop
38! Forced header and separation lines into 80 columns
39!
40! 1976 2016-07-27 13:28:04Z maronga
41! Added call to radiation_last_actions for binary output of land surface model
42! data
43!
44! 1972 2016-07-26 07:52:02Z maronga
45! Added call to lsm_last_actions for binary output of land surface model data
46!
47! 1960 2016-07-12 16:34:24Z suehring
48! Separate humidity and passive scalar
49!
50! 1834 2016-04-07 14:34:20Z raasch
51! Initial version of purely vertical nesting introduced.
52!
53! 1833 2016-04-07 14:23:03Z raasch
54! required user interface version changed
55!
56! 1808 2016-04-05 19:44:00Z raasch
57! routine local_flush replaced by FORTRAN statement
58!
59! 1783 2016-03-06 18:36:17Z raasch
60! required user interface version changed
61!
62! 1781 2016-03-03 15:12:23Z raasch
63! pmc initialization moved from time_integration to here
64!
65! 1779 2016-03-03 08:01:28Z raasch
66! setting of nest_domain and coupling_char moved to the pmci
67!
68! 1764 2016-02-28 12:45:19Z raasch
69! cpp-statements for nesting removed, communicator settings cleaned up
70!
71! 1762 2016-02-25 12:31:13Z hellstea
72! Introduction of nested domain feature
73!
74! 1747 2016-02-08 12:25:53Z raasch
75! OpenACC-adjustment for new surface layer parameterization
76!
77! 1682 2015-10-07 23:56:08Z knoop
78! Code annotations made doxygen readable
79!
80! 1668 2015-09-23 13:45:36Z raasch
81! warning replaced by abort in case of failed user interface check
82!
83! 1666 2015-09-23 07:31:10Z raasch
84! check for user's interface version added
85!
86! 1482 2014-10-18 12:34:45Z raasch
87! adjustments for using CUDA-aware OpenMPI
88!
89! 1468 2014-09-24 14:06:57Z maronga
90! Adapted for use on up to 6-digit processor cores
91!
92! 1402 2014-05-09 14:25:13Z raasch
93! location messages added
94!
95! 1374 2014-04-25 12:55:07Z raasch
96! bugfix: various modules added
97!
98! 1320 2014-03-20 08:40:49Z raasch
99! ONLY-attribute added to USE-statements,
100! kind-parameters added to all INTEGER and REAL declaration statements,
101! kinds are defined in new module kinds,
102! old module precision_kind is removed,
103! revision history before 2012 removed,
104! comment fields (!:) to be used for variable explanations added to
105! all variable declaration statements
106!
107! 1318 2014-03-17 13:35:16Z raasch
108! module interfaces removed
109!
110! 1241 2013-10-30 11:36:58Z heinze
111! initialization of nudging and large scale forcing from external file
112!
113! 1221 2013-09-10 08:59:13Z raasch
114! +wall_flags_00, rflags_invers, rflags_s_inner in copyin statement
115!
116! 1212 2013-08-15 08:46:27Z raasch
117! +tri in copyin statement
118!
119! 1179 2013-06-14 05:57:58Z raasch
120! ref_state added to copyin-list
121!
122! 1113 2013-03-10 02:48:14Z raasch
123! openACC statements modified
124!
125! 1111 2013-03-08 23:54:10Z raasch
126! openACC statements updated
127!
128! 1092 2013-02-02 11:24:22Z raasch
129! unused variables removed
130!
131! 1036 2012-10-22 13:43:42Z raasch
132! code put under GPL (PALM 3.9)
133!
134! 1015 2012-09-27 09:23:24Z raasch
135! Version number changed from 3.8 to 3.8a.
136! OpenACC statements added + code changes required for GPU optimization
137!
138! 849 2012-03-15 10:35:09Z raasch
139! write_particles renamed lpm_write_restart_file
140!
141! Revision 1.1  1997/07/24 11:23:35  raasch
142! Initial revision
143!
144!
145! Description:
146! ------------
147!> Large-Eddy Simulation (LES) model for the convective boundary layer,
148!> optimized for use on parallel machines (implementation realized using the
149!> Message Passing Interface (MPI)). The model can also be run on vector machines
150!> (less well optimized) and workstations. Versions for the different types of
151!> machines are controlled via cpp-directives.
152!> Model runs are only feasible using the ksh-script mrun.
153!>
154!> @todo create routine last_actions instead of calling lsm_last_actions etc.
155!> @todo eventually move CALL usm_write_restart_data to suitable location
156!------------------------------------------------------------------------------!
157 PROGRAM palm
158 
159
160    USE arrays_3d
161
162    USE control_parameters,                                                    &
163        ONLY:  cloud_physics, constant_diffusion, coupling_char, coupling_mode,&
164               do2d_at_begin, do3d_at_begin, humidity, initializing_actions,   &
165               io_blocks, io_group,                                            &
166               large_scale_forcing, message_string, microphysics_seifert,      &
167               nest_domain, neutral,                                           &
168               nudging, passive_scalar, simulated_time, simulated_time_chr,    &
169               urban_surface,                                                  &
170               user_interface_current_revision,                                &
171               user_interface_required_revision, version, wall_heatflux,       &
172               write_binary
173
174    USE cpulog,                                                                &
175        ONLY:  cpu_log, log_point, cpu_statistics
176
177    USE grid_variables,                                                        &
178        ONLY:  fxm, fxp, fym, fyp, fwxm, fwxp, fwym, fwyp, wall_e_x, wall_e_y, &
179               wall_u, wall_v, wall_w_x, wall_w_y
180
181    USE indices,                                                               &
182        ONLY:  nbgp, ngp_2dh, ngp_2dh_s_inner, nzb_diff_s_inner, nzb_diff_s_outer,   &
183               nzb_diff_u, nzb_diff_v, nzb_s_inner, nzb_s_outer, nzb_u_inner,  &
184               nzb_u_outer, nzb_v_inner, nzb_v_outer, nzb_w_inner,             &
185               nzb_w_outer, rflags_invers, rflags_s_inner, wall_flags_0,       &
186               wall_flags_00
187
188    USE kinds
189
190    USE land_surface_model_mod,                                                &
191        ONLY:  land_surface, lsm_last_actions
192
193    USE ls_forcing_mod,                                                        &
194        ONLY:  init_ls_forcing
195
196    USE nudge_mod,                                                             &
197        ONLY:  init_nudge
198
199    USE particle_attributes,                                                   &
200        ONLY:  particle_advection
201
202    USE pegrid
203
204    USE pmc_interface,                                                         &
205        ONLY:  cpl_id, nested_run, pmci_child_initialize, pmci_init,           &
206               pmci_modelconfiguration, pmci_parent_initialize,                &
207               pmci_ensure_nest_mass_conservation
208
209    USE radiation_model_mod,                                                   &
210        ONLY:  radiation, radiation_last_actions
211
212    USE statistics,                                                            &
213        ONLY:  hom, rmask, weight_pres, weight_substep
214
215    USE surface_layer_fluxes_mod,                                              &
216        ONLY:  pt1, qv1, uv_total
217       
218    USE urban_surface_mod,                                                     &
219        ONLY:  usm_write_restart_data       
220
221    IMPLICIT NONE
222
223!
224!-- Local variables
225    CHARACTER(LEN=9)  ::  time_to_string  !< external function converting a time in s to a hh:mm:ss string
226    CHARACTER(LEN=10) ::  env_string      !< to store string of environment var (not referenced below - presumably left over from removed OpenACC code, TODO confirm)
227    INTEGER(iwp)      ::  env_stat        !< to hold status of GET_ENV (not referenced below - TODO confirm)
228    INTEGER(iwp)      ::  i               !< loop index over I/O blocks; also reused in the MPI-2 coupling test
229    INTEGER(iwp)      ::  myid_openmpi    !< OpenMPI local rank for CUDA aware MPI (not referenced below - TODO confirm)
230
231    version = 'PALM 4.0'
232    user_interface_required_revision = 'r1819'
233
234#if defined( __parallel )
235!
236!-- MPI initialisation. comm2d is preliminary set, because
237!-- it will be defined in init_pegrid but is used before in cpu_log.
238    CALL MPI_INIT( ierr )
239
240!
241!-- Initialize the coupling for nested-domain runs
242!-- comm_palm is the communicator which includes all PEs (MPI processes)
243!-- available for this (nested) model. If it is not a nested run, comm_palm
244!-- is returned as MPI_COMM_WORLD
245    CALL pmci_init( comm_palm )
246    comm2d = comm_palm
247!
248!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
249!-- of a further communicator splitting in init_coupling, these numbers will
250!-- be changed in init_pegrid).
251    IF ( nested_run )  THEN
252
253       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
254       CALL MPI_COMM_RANK( comm_palm, myid, ierr )
255
256    ELSE
257
258       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
259       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
260!
261!--    Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm
262!--    will be split in init_coupling)
263       CALL init_coupling
264    ENDIF
265#endif
266
267!
268!-- Initialize measuring of the CPU-time remaining to the run
269    CALL local_tremain_ini
270
271!
272!-- Start of total CPU time measuring.
273    CALL cpu_log( log_point(1), 'total', 'start' )
274    CALL cpu_log( log_point(2), 'initialisation', 'start' )
275
276!
277!-- Open a file for debug output (one file per PE, suffix is the 6-digit rank)
278    WRITE (myid_char,'(''_'',I6.6)')  myid
279    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
280
281!
282!-- Initialize dvrp logging. Also, one PE may be split from the global
283!-- communicator for doing the dvrp output. In that case, the number of
284!-- PEs available for PALM is reduced by one and communicator comm_palm
285!-- is changed accordingly.
286#if defined( __parallel )
287    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
288!
289!-- TEST OUTPUT (TO BE REMOVED)
290    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
291    FLUSH( 9 )
292    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
293       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
294               TRIM( coupling_mode )
295    ENDIF
296#endif
297
298    CALL init_dvrp_logging
299
300!
301!-- Read control parameters from NAMELIST files and read environment-variables
302    CALL parin
303
304!
305!-- Check for the user's interface version; a mismatch is a fatal error
306    IF ( user_interface_current_revision /= user_interface_required_revision )  &
307    THEN
308       message_string = 'current user-interface revision "' //                  &
309                        TRIM( user_interface_current_revision ) // '" does ' // &
310                        'not match the required revision ' //                   &
311                        TRIM( user_interface_required_revision )
312        CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
313    ENDIF
314
315!
316!-- Determine processor topology and local array indices
317    CALL init_pegrid
318
319!
320!-- Generate grid parameters
321    CALL init_grid
322
323!
324!-- Initialize nudging if required
325    IF ( nudging )  THEN
326       CALL init_nudge
327    ENDIF
328
329!
330!-- Initialize reading of large scale forcing from external file - if required
331    IF ( large_scale_forcing )  THEN
332       CALL init_ls_forcing
333    ENDIF
334
335!
336!-- Check control parameters and deduce further quantities
337    CALL check_parameters
338
339!
340!-- Initialize all necessary variables
341    CALL init_3d_model
342
343!
344!-- Coupling protocol setup for nested-domain runs
345    IF ( nested_run )  THEN
346       CALL pmci_modelconfiguration
347!
348!--    Receive and interpolate initial data on children.
349!--    Child initialization must be done first, because a model may be both
350!--    a child and a parent at the same time
351       IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
352          CALL pmci_child_initialize
353!
354!--       Send initial condition data from parent to children
355          CALL pmci_parent_initialize
356!
357!--       Exchange_horiz is needed after the nest initialization
358          IF ( nest_domain )  THEN
359             CALL exchange_horiz( u, nbgp )
360             CALL exchange_horiz( v, nbgp )
361             CALL exchange_horiz( w, nbgp )
362             IF ( .NOT. neutral )  THEN
363                CALL exchange_horiz( pt, nbgp )
364             ENDIF
365             IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
366             IF ( humidity )  THEN
367                CALL exchange_horiz( q, nbgp )
368                IF ( cloud_physics  .AND.  microphysics_seifert )  THEN
369!                   CALL exchange_horiz( qc, nbgp )
370                   CALL exchange_horiz( qr, nbgp ) 
371!                   CALL exchange_horiz( nc, nbgp )
372                   CALL exchange_horiz( nr, nbgp )
373                ENDIF
374             ENDIF
375             IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
376
377             CALL pmci_ensure_nest_mass_conservation
378             CALL pres
379          ENDIF
380       ENDIF
381
382    ENDIF
383
384!
385!-- Output of program header
386    IF ( myid == 0 )  CALL header
387
388    CALL cpu_log( log_point(2), 'initialisation', 'stop' )
389
390!
391!-- Set start time in format hh:mm:ss
392    simulated_time_chr = time_to_string( simulated_time )
393
394!
395!-- If required, output of initial arrays
396    IF ( do2d_at_begin )  THEN
397       CALL data_output_2d( 'xy', 0 )
398       CALL data_output_2d( 'xz', 0 )
399       CALL data_output_2d( 'yz', 0 )
400    ENDIF
401
402    IF ( do3d_at_begin )  THEN
403       CALL data_output_3d( 0 )
404    ENDIF
405
406!
407!-- Integration of the model equations using timestep-scheme
408    CALL time_integration
409
410!
411!-- If required, write binary data for restart runs
412    IF ( write_binary(1:4) == 'true' )  THEN
413
414       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )
415
416       CALL location_message( 'writing restart data', .FALSE. )
417
418       CALL check_open( 14 )
419
420       DO  i = 0, io_blocks-1
421          IF ( i == io_group )  THEN
422!
423!--          Write flow field data (only one I/O group writes at a time)
424             CALL write_3d_binary
425          ENDIF
426#if defined( __parallel )
427          CALL MPI_BARRIER( comm2d, ierr )
428#endif
429       ENDDO
430
431       CALL location_message( 'finished', .TRUE. )
432
433       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )
434
435!
436!--    If required, write particle data
437       IF ( particle_advection )  CALL lpm_write_restart_file
438!
439!--    If required, write urban surface data
440       IF (urban_surface)  CALL usm_write_restart_data
441       
442    ENDIF
443
444!
445!-- If required, repeat output of header including the required CPU-time
446    IF ( myid == 0 )  CALL header
447!
448!-- If required, final land surface and user-defined actions, and
449!-- last actions on the open files and close files. Unit 14 was opened
450!-- in write_3d_binary but it is closed here, to allow writing on this
451!-- unit in routine user_last_actions.
452    CALL cpu_log( log_point(4), 'last actions', 'start' )
453    DO  i = 0, io_blocks-1
454       IF ( i == io_group )  THEN
455          IF ( land_surface )  THEN
456             CALL lsm_last_actions
457          ENDIF
458          IF ( radiation )  THEN
459             CALL radiation_last_actions
460          ENDIF
461          CALL user_last_actions
462          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
463       ENDIF
464#if defined( __parallel )
465       CALL MPI_BARRIER( comm2d, ierr )
466#endif
467    ENDDO
468    CALL close_file( 0 )
469    CALL close_dvrp
470    CALL cpu_log( log_point(4), 'last actions', 'stop' )
471
472#if defined( __mpi2 )
473!
474!-- Test exchange via intercommunicator in case of a MPI-2 coupling
475    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
476       i = 12345 + myid
477       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
478    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
479       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
480       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
481    ENDIF
482#endif
483
484!
485!-- Take final CPU-time for CPU-time analysis
486    CALL cpu_log( log_point(1), 'total', 'stop' )
487    CALL cpu_statistics
488
489#if defined( __parallel )
490    CALL MPI_FINALIZE( ierr )
491#endif
492
493 END PROGRAM palm
Note: See TracBrowser for help on using the repository browser.