source: palm/trunk/SOURCE/palm.f90 @ 4824

Last change on this file since 4824 was 4797, checked in by raasch, 4 years ago

files re-formatted to follow the PALM coding standard

!> @file palm.f90
!--------------------------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
! Public License as published by the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
! Public License for more details.
!
! You should have received a copy of the GNU General Public License along with PALM. If not, see
! <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2020 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 4797 2020-11-26 16:02:39Z moh.hefny $
! file re-formatted to follow the PALM coding standard
!
! 4539 2020-05-18 14:05:17Z raasch
! log point name changed
!
! 4535 2020-05-15 12:07:23Z raasch
! bugfix for restart data format query
!
! 4496 2020-04-15 08:37:26Z raasch
! bugfix: coupling character added to restart output filename
!
! 4495 2020-04-13 20:11:20Z raasch
! restart data handling with MPI-IO added
!
! 4457 2020-03-11 14:20:43Z raasch
! use statement for exchange horiz added
!
! 4444 2020-03-05 15:59:50Z raasch
! bugfix: cpp-directives for serial mode added
!
! 4414 2020-02-19 20:16:04Z suehring
! Call to module_interface_init_numerics
!
! 4400 2020-02-10 20:32:41Z suehring
! Add interface to initialize data output with dom
!
! 4360 2020-01-07 11:25:50Z suehring
! implement new palm_date_time_mod
!
! 4094 2019-07-12 09:24:21Z gronemeier
! Corrected "Former revisions" section
!
! 4039 2019-06-18 10:32:41Z suehring
! Rename subroutines in module for diagnostic quantities
!
! 4017 2019-06-06 12:16:46Z schwenkel
! new module for calculation and output of diagnostic quantities added
!
! 3885 2019-04-11 11:29:34Z kanani
! Changes related to global restructuring of location messages and introduction
! of additional debug messages
!
! 3761 2019-02-25 15:31:42Z raasch
! unused variable removed
!
! 3719 2019-02-06 13:10:18Z kanani
! Included cpu measurement for wall/soil spinup
!
! 3703 2019-01-29 16:43:53Z knoop
! Some interface calls moved to module_interface + cleanup
!
! 3648 2019-01-02 16:35:46Z suehring
! Rename subroutines for surface-data output
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
!> Large-Eddy Simulation (LES) model for atmospheric and oceanic boundary-layer flows,
!> see the PALM homepage https://palm-model.org for further information
!--------------------------------------------------------------------------------------------------!
 PROGRAM palm


    USE arrays_3d

#if defined( __parallel )
    USE bulk_cloud_model_mod,                                                                      &
        ONLY:  bulk_cloud_model, microphysics_morrison, microphysics_seifert
#endif

    USE control_parameters,                                                                        &
        ONLY:  coupling_char, do2d_at_begin, do3d_at_begin, io_blocks, io_group, message_string,   &
               restart_data_format_output, runnr, simulated_time_chr, spinup,                      &
               time_since_reference_point, user_interface_current_revision,                        &
               user_interface_required_revision, version, write_binary

#if defined( __parallel )
    USE control_parameters,                                                                        &
        ONLY:  child_domain, constant_diffusion, humidity, initializing_actions, neutral,          &
               passive_scalar
#endif

    USE cpulog,                                                                                    &
        ONLY:  cpu_log, cpu_statistics, log_point

#if defined( __parallel )
    USE cpulog,                                                                                    &
        ONLY:  log_point_s
#endif

    USE diagnostic_output_quantities_mod,                                                          &
        ONLY:  doq_calculate

#if defined( __parallel )
    USE exchange_horiz_mod,                                                                        &
        ONLY:  exchange_horiz

    USE indices,                                                                                   &
        ONLY:  nbgp
#endif

    USE kinds

    USE module_interface,                                                                          &
        ONLY:  module_interface_init_numerics,                                                     &
               module_interface_init_output,                                                       &
               module_interface_last_actions


    USE multi_agent_system_mod,                                                                    &
        ONLY:  agents_active, mas_last_actions

    USE netcdf_data_input_mod,                                                                     &
        ONLY:  netcdf_data_input_inquire_file, netcdf_data_input_init,                             &
               netcdf_data_input_surface_data, netcdf_data_input_topo

    USE pegrid

#if defined( __parallel )
    USE pmc_particle_interface,                                                                    &
        ONLY:  pmcp_g_alloc_win

    USE pmc_interface,                                                                             &
        ONLY:  nested_run, pmci_child_initialize, pmci_init, pmci_modelconfiguration,              &
               pmci_parent_initialize
#endif

    USE restart_data_mpi_io_mod,                                                                   &
        ONLY:  rd_mpi_io_close, rd_mpi_io_open

    USE surface_data_output_mod,                                                                   &
        ONLY:  surface_data_output_last_action

    USE write_restart_data_mod,                                                                    &
        ONLY:  wrd_global, wrd_local

#if defined( __parallel )  &&  defined( _OPENACC )
    USE openacc
#endif


    IMPLICIT NONE

!
!-- Local variables
    CHARACTER(LEN=9) ::  time_to_string  !< external function returning time as hh:mm:ss string

    INTEGER(iwp)     ::  i                   !< loop counter for blocked I/O
#if defined( __parallel )  &&  defined( _OPENACC )
    INTEGER(acc_device_kind) ::  device_type      !< device type for OpenACC
    INTEGER(iwp)             ::  local_comm       !< local communicator (shared memory)
    INTEGER(iwp)             ::  local_num_procs  !< local number of processes
    INTEGER(iwp)             ::  local_id         !< local id
    INTEGER(iwp)             ::  num_devices      !< number of devices visible to OpenACC
    INTEGER(iwp)             ::  my_device        !< device used by this process
#endif

    version = 'PALM 6.0'
    user_interface_required_revision = 'r4495'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set here, because it will only be defined in
!-- init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )

!
!-- Initialize the coupling for nested-domain runs. comm_palm is the communicator which includes
!-- all PEs (MPI processes) available for this (nested) model. If it is not a nested run, comm_palm
!-- is returned as MPI_COMM_WORLD.
    CALL cpu_log( log_point_s(70), 'pmci_init', 'start' )
    CALL pmci_init( comm_palm )
    CALL cpu_log( log_point_s(70), 'pmci_init', 'stop' )
    comm2d = comm_palm
!
!-- Get the (preliminary) number of MPI processes and the local PE-id (in case of a further
!-- communicator splitting in init_coupling, these numbers will be changed in init_pegrid).
    IF ( nested_run )  THEN

       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
       CALL MPI_COMM_RANK( comm_palm, myid, ierr )

    ELSE

       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
!
!--    Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm will be split
!--    in init_coupling)
       CALL init_coupling
    ENDIF

#ifdef _OPENACC
!
!-- Select the OpenACC device to be used by this process. For this, find out how many processes
!-- are running on the same node and which local id this process has.
    IF ( nested_run )  THEN
       CALL MPI_COMM_SPLIT_TYPE( comm_palm, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL, local_comm,    &
                                 ierr )
    ELSE
       CALL MPI_COMM_SPLIT_TYPE( MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL,           &
                                 local_comm, ierr )
    ENDIF
    CALL MPI_COMM_SIZE( local_comm, local_num_procs, ierr )
    CALL MPI_COMM_RANK( local_comm, local_id, ierr )

!
!-- This loop including the barrier is a workaround for PGI compiler versions up to and including
!-- 18.4. Later releases are able to select their GPUs in parallel, without running into spurious
!-- errors.
    DO i = 0, local_num_procs-1
       CALL MPI_BARRIER( local_comm, ierr )

       IF ( i == local_id )  THEN
          device_type = acc_get_device_type()
          num_devices = acc_get_num_devices( device_type )
          my_device = MOD( local_id, num_devices )
          CALL acc_set_device_num( my_device, device_type )
       ENDIF
    ENDDO
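!
!-- Example: with this round-robin mapping, 8 processes sharing a node that exposes 4 devices get
!-- local_id 0..7 assigned to device 0, 1, 2, 3, 0, 1, 2, 3, respectively.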

    CALL MPI_COMM_FREE( local_comm, ierr )
#endif
#endif

!
!-- Initialize measuring of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring.
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
    WRITE (myid_char,'(''_'',I6.6)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
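!
!-- Example: for MPI rank 3 and an empty coupling_char, the file opened above is named
!-- DEBUG_000003.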

!
!-- Initialize dvrp logging. Also, one PE may be split from the global communicator for doing the
!-- dvrp output. In that case, the number of PEs available for PALM is reduced by one and
!-- communicator comm_palm is changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
#endif

!
!-- Read control parameters from NAMELIST files and read environment variables
    CALL parin

!
!-- Check for the user's interface version
    IF ( user_interface_current_revision /= user_interface_required_revision )  THEN
       message_string = 'current user-interface revision "' //                                     &
                        TRIM( user_interface_current_revision ) // '" does ' //                    &
                        'not match the required revision ' //                                      &
                        TRIM( user_interface_required_revision )
       CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
    ENDIF
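!
!-- Note: user_interface_current_revision is expected to be set in the user-interface code
!-- (user module) and has to match the required revision given above (here 'r4495').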

!
!-- Determine processor topology and local array indices
    CALL init_pegrid
!
!-- Check if an input file according to the input-data standard exists
    CALL netcdf_data_input_inquire_file
!
!-- Read topography input data if required. This must be done before the numerical grid is finally
!-- created in init_grid.
    CALL netcdf_data_input_topo
!
!-- Generate grid parameters, initialize generic topography and further process topography
!-- information if required.
    CALL init_grid
!
!-- Initialize boundary conditions and numerics such as the multigrid solver or the advection
!-- routine
    CALL module_interface_init_numerics
!
!-- Read global attributes if available.
    CALL netcdf_data_input_init
!
!-- Read surface classification data, e.g. vegetation and soil types, water surfaces, etc., if
!-- available. Some of these data are required before check_parameters is invoked.
    CALL netcdf_data_input_surface_data
!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

    CALL init_3d_model

    CALL module_interface_init_output

#if defined( __parallel )
!
!-- Coupling protocol setup for nested-domain runs
    IF ( nested_run )  THEN
       CALL pmci_modelconfiguration
!
!--    Receive and interpolate initial data on the child domains. If the model is both child and
!--    parent, the child initialization must be carried out first.
       IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
          CALL pmci_child_initialize
!
!--       Send initial condition data from parent to children
          CALL pmci_parent_initialize
!
!--       Exchange_horiz is needed after the nest initialization.
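!--       It fills the nbgp ghost-point layers of the prognostic arrays with values from the
!--       neighbouring subdomains, so that the interpolated initial data are consistent across
!--       processor boundaries.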
          IF ( child_domain )  THEN
             CALL exchange_horiz( u, nbgp )
             CALL exchange_horiz( v, nbgp )
             CALL exchange_horiz( w, nbgp )
             IF ( .NOT. neutral )  THEN
                CALL exchange_horiz( pt, nbgp )
             ENDIF
             IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
             IF ( humidity )  THEN
                CALL exchange_horiz( q, nbgp )
                IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
                   CALL exchange_horiz( qc, nbgp )
                   CALL exchange_horiz( nc, nbgp )
                ENDIF
                IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
                   CALL exchange_horiz( qr, nbgp )
                   CALL exchange_horiz( nr, nbgp )
                ENDIF
             ENDIF
             IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
          ENDIF
       ENDIF

       CALL pmcp_g_alloc_win                    ! Must be called after pmci_child_initialize and pmci_parent_initialize
    ENDIF
#endif

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Integration of the non-atmospheric equations (land surface model, urban surface model)
    IF ( spinup )  THEN
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'start' )
       CALL time_integration_spinup
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'stop' )
    ENDIF

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( time_since_reference_point )
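!
!-- Example: a value of time_since_reference_point = 3661.0 s corresponds to 1 h, 1 min and 1 s.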

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF

    IF ( do3d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary )  THEN

       CALL cpu_log( log_point(22), 'write-restart-data', 'start' )

       CALL location_message( 'writing restart data', 'start' )

       IF ( TRIM( restart_data_format_output ) == 'fortran_binary' )  THEN

          DO  i = 0, io_blocks-1
             IF ( i == io_group )  THEN

!
!--             Open binary file
                CALL check_open( 14 )
!
!--             Write control parameters and other global variables for restart.
                IF ( myid == 0 )  CALL wrd_global
!
!--             Write processor-specific flow field data for restart runs
                CALL wrd_local
!
!--             Close binary file
                CALL close_file( 14 )

             ENDIF
#if defined( __parallel )
             CALL MPI_BARRIER( comm2d, ierr )
#endif
          ENDDO

       ELSEIF ( restart_data_format_output(1:3) == 'mpi' )  THEN
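!
!--       Note: only the first three characters are compared, so that all MPI-IO based format
!--       settings (e.g. 'mpi', 'mpi_shared_memory') select this branch.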
!
!--       Open MPI-IO restart file
          CALL rd_mpi_io_open( 'write', 'BINOUT' // TRIM( coupling_char ) )
!
!--       Write control parameters and other global variables for restart.
          CALL wrd_global
!
!--       Write processor-specific flow field data for restart runs
          CALL wrd_local
!
!--       Close restart file
          CALL rd_mpi_io_close

       ENDIF

       CALL location_message( 'writing restart data', 'finished' )

       CALL cpu_log( log_point(22), 'write-restart-data', 'stop' )

    ENDIF
!
!-- Last actions for surface output, for instantaneous and time-averaged data
    CALL surface_data_output_last_action( 0 )
    CALL surface_data_output_last_action( 1 )
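!
!-- (the argument presumably distinguishes the two output types: 0 for instantaneous and 1 for
!-- time-averaged surface data)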

!
!-- If required, repeat output of header including the required CPU-time
    IF ( myid == 0 )  CALL header
!
!-- Perform module-specific last actions
    CALL cpu_log( log_point(4), 'last actions', 'start' )

    IF ( myid == 0  .AND.  agents_active )  CALL mas_last_actions ! ToDo: move to module_interface

    CALL module_interface_last_actions

    CALL cpu_log( log_point(4), 'last actions', 'stop' )

!
!-- Close files
    CALL close_file( 0 )

!
!-- Write run number to file (used by palmrun to create unified cycle numbers for output files).
    IF ( myid == 0  .AND.  runnr > 0 )  THEN
       OPEN( 90, FILE='RUN_NUMBER', FORM='FORMATTED' )
       WRITE( 90, '(I4)' )  runnr
       CLOSE( 90 )
    ENDIF

!
!-- Take final CPU-time for CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm