source: palm/trunk/SOURCE/palm.f90 @ 2119

Last change on this file since 2119 was 2119, checked in by raasch, 4 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 15.2 KB
Line 
1!> @file palm.f90
2!------------------------------------------------------------------------------!
3! This file is part of PALM.
4!
5! PALM is free software: you can redistribute it and/or modify it under the
6! terms of the GNU General Public License as published by the Free Software
7! Foundation, either version 3 of the License, or (at your option) any later
8! version.
9!
10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
11! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
12! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
13!
14! You should have received a copy of the GNU General Public License along with
15! PALM. If not, see <http://www.gnu.org/licenses/>.
16!
17! Copyright 1997-2017 Leibniz Universitaet Hannover
18!------------------------------------------------------------------------------!
19!
20! Current revisions:
21! -----------------
22!
23!
24! Former revisions:
25! -----------------
26! $Id: palm.f90 2119 2017-01-17 16:51:50Z raasch $
27!
28! 2118 2017-01-17 16:38:49Z raasch
29! OpenACC directives and related code removed
30!
31! 2011 2016-09-19 17:29:57Z kanani
32! Flag urban_surface is now defined in module control_parameters.
33!
34! 2007 2016-08-24 15:47:17Z kanani
35! Temporarily added CALL for writing of restart data for urban surface model
36!
37! 2000 2016-08-20 18:09:15Z knoop
38! Forced header and separation lines into 80 columns
39!
40! 1976 2016-07-27 13:28:04Z maronga
41! Added call to radiation_last_actions for binary output of land surface model
42! data
43!
44! 1972 2016-07-26 07:52:02Z maronga
45! Added call to lsm_last_actions for binary output of land surface model data
46!
47! 1960 2016-07-12 16:34:24Z suehring
48! Separate humidity and passive scalar
49!
50! 1834 2016-04-07 14:34:20Z raasch
51! Initial version of purely vertical nesting introduced.
52!
53! 1833 2016-04-07 14:23:03Z raasch
54! required user interface version changed
55!
56! 1808 2016-04-05 19:44:00Z raasch
57! routine local_flush replaced by FORTRAN statement
58!
59! 1783 2016-03-06 18:36:17Z raasch
60! required user interface version changed
61!
62! 1781 2016-03-03 15:12:23Z raasch
63! pmc initialization moved from time_integration to here
64!
65! 1779 2016-03-03 08:01:28Z raasch
66! setting of nest_domain and coupling_char moved to the pmci
67!
68! 1764 2016-02-28 12:45:19Z raasch
69! cpp-statements for nesting removed, communicator settings cleaned up
70!
71! 1762 2016-02-25 12:31:13Z hellstea
72! Introduction of nested domain feature
73!
74! 1747 2016-02-08 12:25:53Z raasch
75! OpenACC-adjustment for new surface layer parameterization
76!
77! 1682 2015-10-07 23:56:08Z knoop
78! Code annotations made doxygen readable
79!
80! 1668 2015-09-23 13:45:36Z raasch
81! warning replaced by abort in case of failed user interface check
82!
83! 1666 2015-09-23 07:31:10Z raasch
84! check for user's interface version added
85!
86! 1482 2014-10-18 12:34:45Z raasch
87! adjustments for using CUDA-aware OpenMPI
88!
89! 1468 2014-09-24 14:06:57Z maronga
90! Adapted for use on up to 6-digit processor cores
91!
92! 1402 2014-05-09 14:25:13Z raasch
93! location messages added
94!
95! 1374 2014-04-25 12:55:07Z raasch
96! bugfix: various modules added
97!
98! 1320 2014-03-20 08:40:49Z raasch
99! ONLY-attribute added to USE-statements,
100! kind-parameters added to all INTEGER and REAL declaration statements,
101! kinds are defined in new module kinds,
102! old module precision_kind is removed,
103! revision history before 2012 removed,
104! comment fields (!:) to be used for variable explanations added to
105! all variable declaration statements
106!
107! 1318 2014-03-17 13:35:16Z raasch
108! module interfaces removed
109!
110! 1241 2013-10-30 11:36:58Z heinze
111! initialization of nudging and large scale forcing from external file
112!
113! 1221 2013-09-10 08:59:13Z raasch
114! +wall_flags_00, rflags_invers, rflags_s_inner in copyin statement
115!
116! 1212 2013-08-15 08:46:27Z raasch
117! +tri in copyin statement
118!
119! 1179 2013-06-14 05:57:58Z raasch
120! ref_state added to copyin-list
121!
122! 1113 2013-03-10 02:48:14Z raasch
123! openACC statements modified
124!
125! 1111 2013-03-08 23:54:10Z raasch
126! openACC statements updated
127!
128! 1092 2013-02-02 11:24:22Z raasch
129! unused variables removed
130!
131! 1036 2012-10-22 13:43:42Z raasch
132! code put under GPL (PALM 3.9)
133!
134! 1015 2012-09-27 09:23:24Z raasch
135! Version number changed from 3.8 to 3.8a.
136! OpenACC statements added + code changes required for GPU optimization
137!
138! 849 2012-03-15 10:35:09Z raasch
139! write_particles renamed lpm_write_restart_file
140!
141! Revision 1.1  1997/07/24 11:23:35  raasch
142! Initial revision
143!
144!
145! Description:
146! ------------
147!> Large-Eddy Simulation (LES) model for the convective boundary layer,
148!> optimized for use on parallel machines (implementation realized using the
149!> Message Passing Interface (MPI)). The model can also be run on vector machines
150!> (less well optimized) and workstations. Versions for the different types of
151!> machines are controlled via cpp-directives.
152!> Model runs are only feasible using the ksh-script mrun.
153!>
154!> @todo create routine last_actions instead of calling lsm_last_actions etc.
155!> @todo eventually move CALL usm_write_restart_data to suitable location
156!------------------------------------------------------------------------------!
157 PROGRAM palm
158 
159
160    USE arrays_3d
161
162    USE control_parameters,                                                    &
163        ONLY:  constant_diffusion, coupling_char, coupling_mode,               &
164               do2d_at_begin, do3d_at_begin, humidity, io_blocks, io_group,    &
165               large_scale_forcing, message_string, nest_domain, neutral,      &
166               nudging, passive_scalar, simulated_time, simulated_time_chr,    &
167               urban_surface,                                                  &
168               user_interface_current_revision,                                &
169               user_interface_required_revision, version, wall_heatflux,       &
170               write_binary
171
172    USE cpulog,                                                                &
173        ONLY:  cpu_log, log_point, cpu_statistics
174
175    USE grid_variables,                                                        &
176        ONLY:  fxm, fxp, fym, fyp, fwxm, fwxp, fwym, fwyp, wall_e_x, wall_e_y, &
177               wall_u, wall_v, wall_w_x, wall_w_y
178
179    USE indices,                                                               &
180        ONLY:  nbgp, ngp_2dh, ngp_2dh_s_inner, nzb_diff_s_inner, nzb_diff_s_outer,   &
181               nzb_diff_u, nzb_diff_v, nzb_s_inner, nzb_s_outer, nzb_u_inner,  &
182               nzb_u_outer, nzb_v_inner, nzb_v_outer, nzb_w_inner,             &
183               nzb_w_outer, rflags_invers, rflags_s_inner, wall_flags_0,       &
184               wall_flags_00
185
186    USE kinds
187
188    USE land_surface_model_mod,                                                &
189        ONLY:  land_surface, lsm_last_actions
190
191    USE ls_forcing_mod,                                                        &
192        ONLY:  init_ls_forcing
193
194    USE nudge_mod,                                                             &
195        ONLY:  init_nudge
196
197    USE particle_attributes,                                                   &
198        ONLY:  particle_advection
199
200    USE pegrid
201
202    USE pmc_interface,                                                         &
203        ONLY:  cpl_id, nested_run, pmci_child_initialize, pmci_init,           &
204               pmci_modelconfiguration, pmci_parent_initialize
205
206    USE radiation_model_mod,                                                   &
207        ONLY:  radiation, radiation_last_actions
208
209    USE statistics,                                                            &
210        ONLY:  hom, rmask, weight_pres, weight_substep
211
212    USE surface_layer_fluxes_mod,                                              &
213        ONLY:  pt1, qv1, uv_total
214       
215    USE urban_surface_mod,                                                     &
216        ONLY:  usm_write_restart_data       
217
218    IMPLICIT NONE
219
220!
221!-- Local variables
222    CHARACTER(LEN=9)  ::  time_to_string  !< external function returning the simulated time as a hh:mm:ss string
223    CHARACTER(LEN=10) ::  env_string      !< to store string of environment var (NOTE(review): not referenced in this file; likely leftover of the r2118 OpenACC removal - confirm before deleting)
224    INTEGER(iwp)      ::  env_stat        !< to hold status of GET_ENV (NOTE(review): not referenced in this file - confirm before deleting)
225    INTEGER(iwp)      ::  i               !< loop index over I/O blocks; also reused in the MPI-2 coupling test exchange
226    INTEGER(iwp)      ::  myid_openmpi    !< OpenMPI local rank for CUDA aware MPI (NOTE(review): not referenced in this file - confirm before deleting)
227
228    version = 'PALM 4.0'
229    user_interface_required_revision = 'r1819'
230
231#if defined( __parallel )
232!
233!-- MPI initialisation. comm2d is preliminarily set, because
234!-- it will be defined in init_pegrid but is used before in cpu_log.
235    CALL MPI_INIT( ierr )
236
237!
238!-- Initialize the coupling for nested-domain runs
239!-- comm_palm is the communicator which includes all PEs (MPI processes)
240!-- available for this (nested) model. If it is not a nested run, comm_palm
241!-- is returned as MPI_COMM_WORLD
242    CALL pmci_init( comm_palm )
243    comm2d = comm_palm
244!
245!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
246!-- of a further communicator splitting in init_coupling, these numbers will
247!-- be changed in init_pegrid).
248    IF ( nested_run )  THEN
249
250       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
251       CALL MPI_COMM_RANK( comm_palm, myid, ierr )
252
253    ELSE
254
255       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
256       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
257!
258!--    Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm
259!--    will be split in init_coupling)
260       CALL init_coupling
261    ENDIF
262#endif
263
264!
265!-- Initialize measuring of the CPU-time remaining to the run
266    CALL local_tremain_ini
267
268!
269!-- Start of total CPU time measuring.
270    CALL cpu_log( log_point(1), 'total', 'start' )
271    CALL cpu_log( log_point(2), 'initialisation', 'start' )
272
273!
274!-- Open a file for debug output (unit 9, one file per MPI rank)
275    WRITE (myid_char,'(''_'',I6.6)')  myid
276    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
277
278!
279!-- Initialize dvrp logging. Also, one PE may be split from the global
280!-- communicator for doing the dvrp output. In that case, the number of
281!-- PEs available for PALM is reduced by one and communicator comm_palm
282!-- is changed accordingly.
283#if defined( __parallel )
284    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
285!
286!-- TEST OUTPUT (TO BE REMOVED)
287    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
288    FLUSH( 9 )
289    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
290       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
291               TRIM( coupling_mode )
292    ENDIF
293#endif
294
295    CALL init_dvrp_logging
296
297!
298!-- Read control parameters from NAMELIST files and read environment-variables
299    CALL parin
300
301!
302!-- Check for the user's interface version; mismatch is a fatal error (abort)
303    IF ( user_interface_current_revision /= user_interface_required_revision )  &
304    THEN
305       message_string = 'current user-interface revision "' //                  &
306                        TRIM( user_interface_current_revision ) // '" does ' // &
307                        'not match the required revision ' //                   &
308                        TRIM( user_interface_required_revision )
309        CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
310    ENDIF
311
312!
313!-- Determine processor topology and local array indices
314    CALL init_pegrid
315
316!
317!-- Generate grid parameters
318    CALL init_grid
319
320!
321!-- Initialize nudging if required
322    IF ( nudging )  THEN
323       CALL init_nudge
324    ENDIF
325
326!
327!-- Initialize reading of large scale forcing from external file - if required
328    IF ( large_scale_forcing )  THEN
329       CALL init_ls_forcing
330    ENDIF
331
332!
333!-- Check control parameters and deduce further quantities
334    CALL check_parameters
335
336!
337!-- Initialize all necessary variables
338    CALL init_3d_model
339
340!
341!-- Coupling protocol setup for nested-domain runs
342    IF ( nested_run )  THEN
343       CALL pmci_modelconfiguration
344!
345!--    Receive and interpolate initial data on children.
346!--    Child initialization must be made first if the model is both child and
347!--    parent
348       CALL pmci_child_initialize
349!
350!--    Send initial condition data from parent to children
351       CALL pmci_parent_initialize
352!
353!--    Exchange_horiz is needed after the nest initialization
354       IF ( nest_domain )  THEN
355          CALL exchange_horiz( u, nbgp )
356          CALL exchange_horiz( v, nbgp )
357          CALL exchange_horiz( w, nbgp )
358          IF ( .NOT. neutral )  THEN
359             CALL exchange_horiz( pt, nbgp )
360          ENDIF
361          IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
362          IF ( humidity       )  CALL exchange_horiz( q, nbgp )
363          IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
364       ENDIF
365
366    ENDIF
367
368!
369!-- Output of program header
370    IF ( myid == 0 )  CALL header
371
372    CALL cpu_log( log_point(2), 'initialisation', 'stop' )
373
374!
375!-- Set start time in format hh:mm:ss
376    simulated_time_chr = time_to_string( simulated_time )
377
378!
379!-- If required, output of initial arrays
380    IF ( do2d_at_begin )  THEN
381       CALL data_output_2d( 'xy', 0 )
382       CALL data_output_2d( 'xz', 0 )
383       CALL data_output_2d( 'yz', 0 )
384    ENDIF
385
386    IF ( do3d_at_begin )  THEN
387       CALL data_output_3d( 0 )
388    ENDIF
389
390!
391!-- Integration of the model equations using timestep-scheme
392    CALL time_integration
393
394!
395!-- If required, write binary data for restart runs
396    IF ( write_binary(1:4) == 'true' )  THEN
397
398       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )
399
400       CALL location_message( 'writing restart data', .FALSE. )
401
402       CALL check_open( 14 )
403
404       DO  i = 0, io_blocks-1
405          IF ( i == io_group )  THEN
406!
407!--          Write flow field data (only the ranks of the current I/O group
408!--          write; the barrier below serializes the groups)
409             CALL write_3d_binary
410          ENDIF
411#if defined( __parallel )
412          CALL MPI_BARRIER( comm2d, ierr )
413#endif
414       ENDDO
415
416       CALL location_message( 'finished', .TRUE. )
417
418       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )
419
420!
421!--    If required, write particle data
422       IF ( particle_advection )  CALL lpm_write_restart_file
423!
424!--    If required, write urban surface data
425       IF (urban_surface)  CALL usm_write_restart_data
426       
427    ENDIF
428
429!
430!-- If required, repeat output of header including the required CPU-time
431    IF ( myid == 0 )  CALL header
432!
433!-- If required, final land surface and user-defined actions, and
434!-- last actions on the open files and close files. Unit 14 was opened
435!-- in write_3d_binary but it is closed here, to allow writing on this
436!-- unit in routine user_last_actions.
437    CALL cpu_log( log_point(4), 'last actions', 'start' )
438    DO  i = 0, io_blocks-1
439       IF ( i == io_group )  THEN
440          IF ( land_surface )  THEN
441             CALL lsm_last_actions
442          ENDIF
443          IF ( radiation )  THEN
444             CALL radiation_last_actions
445          ENDIF
446          CALL user_last_actions
447          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
448       ENDIF
449#if defined( __parallel )
450       CALL MPI_BARRIER( comm2d, ierr )
451#endif
452    ENDDO
453    CALL close_file( 0 )
454    CALL close_dvrp
455    CALL cpu_log( log_point(4), 'last actions', 'stop' )
456
457#if defined( __mpi2 )
458!
459!-- Test exchange via intercommunicator in case of a MPI-2 coupling
460    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
461       i = 12345 + myid
462       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
463    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
464       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
465       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
466    ENDIF
467#endif
468
469!
470!-- Take final CPU-time for CPU-time analysis
471    CALL cpu_log( log_point(1), 'total', 'stop' )
472    CALL cpu_statistics
473
474#if defined( __parallel )
475    CALL MPI_FINALIZE( ierr )
476#endif
477 END PROGRAM palm
Note: See TracBrowser for help on using the repository browser.