source: palm/trunk/SOURCE/palm.f90 @ 4495

Last change on this file since 4495 was 4495, checked in by raasch, 5 years ago

restart data handling with MPI-IO added, first part

  • Property svn:keywords set to Id
File size: 16.0 KB
!> @file palm.f90
!------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software
! Foundation, either version 3 of the License, or (at your option) any later
! version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2020 Leibniz Universitaet Hannover
!------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 4495 2020-04-13 20:11:20Z raasch $
! restart data handling with MPI-IO added
!
! 4457 2020-03-11 14:20:43Z raasch
! use statement for exchange horiz added
!
! 4444 2020-03-05 15:59:50Z raasch
! bugfix: cpp-directives for serial mode added
!
! 4414 2020-02-19 20:16:04Z suehring
! Call to module_interface_init_numerics
!
! 4400 2020-02-10 20:32:41Z suehring
! Add interface to initialize data output with dom
!
! 4360 2020-01-07 11:25:50Z suehring
! implement new palm_date_time_mod
!
! 4094 2019-07-12 09:24:21Z gronemeier
! Corrected "Former revisions" section
!
! 4039 2019-06-18 10:32:41Z suehring
! Rename subroutines in module for diagnostic quantities
!
! 4017 2019-06-06 12:16:46Z schwenkel
! new module for calculation and output of diagnostic quantities added
!
! 3885 2019-04-11 11:29:34Z kanani
! Changes related to global restructuring of location messages and introduction
! of additional debug messages
!
! 3761 2019-02-25 15:31:42Z raasch
! unused variable removed
!
! 3719 2019-02-06 13:10:18Z kanani
! Included cpu measurement for wall/soil spinup
!
! 3703 2019-01-29 16:43:53Z knoop
! Some interface calls moved to module_interface + cleanup
!
! 3648 2019-01-02 16:35:46Z suehring
! Rename subroutines for surface-data output
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
!> Large-Eddy Simulation (LES) model for atmospheric and oceanic boundary-layer
!> flows
!> see the PALM homepage https://palm-model.org for further information
!------------------------------------------------------------------------------!
 PROGRAM palm


    USE arrays_3d

#if defined( __parallel )
    USE bulk_cloud_model_mod,                                                  &
        ONLY:  bulk_cloud_model, microphysics_morrison, microphysics_seifert
#endif

    USE control_parameters,                                                    &
        ONLY:  coupling_char, do2d_at_begin, do3d_at_begin, io_blocks,         &
               io_group, message_string, restart_data_format_output, runnr,    &
               simulated_time_chr, spinup, time_since_reference_point,         &
               user_interface_current_revision,                                &
               user_interface_required_revision, version, write_binary

#if defined( __parallel )
    USE control_parameters,                                                    &
        ONLY:  child_domain, constant_diffusion, humidity,                     &
               initializing_actions, neutral, passive_scalar
#endif

    USE cpulog,                                                                &
        ONLY:  cpu_log, log_point, cpu_statistics

#if defined( __parallel )
    USE cpulog,                                                                &
        ONLY:  log_point_s
#endif

    USE diagnostic_output_quantities_mod,                                      &
        ONLY:  doq_calculate

#if defined( __parallel )
    USE exchange_horiz_mod,                                                    &
        ONLY:  exchange_horiz

    USE indices,                                                               &
        ONLY:  nbgp
#endif

    USE kinds

    USE module_interface,                                                      &
        ONLY:  module_interface_init_numerics,                                 &
               module_interface_init_output,                                   &
               module_interface_last_actions


    USE multi_agent_system_mod,                                                &
        ONLY:  agents_active, mas_last_actions

    USE netcdf_data_input_mod,                                                 &
        ONLY:  netcdf_data_input_inquire_file, netcdf_data_input_init,         &
               netcdf_data_input_surface_data, netcdf_data_input_topo

    USE pegrid

#if defined( __parallel )
    USE pmc_particle_interface,                                                &
        ONLY:  pmcp_g_alloc_win

    USE pmc_interface,                                                         &
        ONLY:  nested_run, pmci_child_initialize, pmci_init,                   &
               pmci_modelconfiguration, pmci_parent_initialize
#endif

    USE restart_data_mpi_io_mod,                                               &
        ONLY:  rd_mpi_io_close, rd_mpi_io_open

    USE surface_data_output_mod,                                               &
        ONLY:  surface_data_output_last_action

    USE write_restart_data_mod,                                                &
        ONLY:  wrd_global, wrd_local

#if defined( __parallel )  &&  defined( _OPENACC )
    USE openacc
#endif


    IMPLICIT NONE

!
!-- Local variables
    CHARACTER(LEN=9) ::  time_to_string  !< function returning the simulated time as a hh:mm:ss string
    INTEGER(iwp)     ::  i               !< loop counter for blocked I/O
#if defined( __parallel )  &&  defined( _OPENACC )
    INTEGER(iwp)     ::  local_comm      !< local communicator (shared memory)
    INTEGER(iwp)     ::  local_num_procs !< local number of processes
    INTEGER(iwp)     ::  local_id        !< local id
    INTEGER(acc_device_kind) ::  device_type  !< device type for OpenACC
    INTEGER(iwp)     ::  num_devices     !< number of devices visible to OpenACC
    INTEGER(iwp)     ::  my_device       !< device used by this process
#endif

    version = 'PALM 6.0'
    user_interface_required_revision = 'r4495'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set here, because it will be
!-- defined in init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )

!
!-- Initialize the coupling for nested-domain runs.
!-- comm_palm is the communicator that includes all PEs (MPI processes)
!-- available for this (nested) model. If it is not a nested run, comm_palm
!-- is returned as MPI_COMM_WORLD.
    CALL cpu_log( log_point_s(70), 'pmci_init', 'start' )
    CALL pmci_init( comm_palm )
    CALL cpu_log( log_point_s(70), 'pmci_init', 'stop' )
    comm2d = comm_palm
!
!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
!-- the communicator is split further in init_coupling, these numbers will be
!-- changed in init_pegrid).
    IF ( nested_run )  THEN

       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
       CALL MPI_COMM_RANK( comm_palm, myid, ierr )

    ELSE

       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
!
!--    Initialize the PE topology in case of coupled atmosphere-ocean runs
!--    (comm_palm will be split in init_coupling)
       CALL init_coupling
    ENDIF

#ifdef _OPENACC
!
!-- Select the OpenACC device to be used by this process. To do so, find out
!-- how many processes are running on the same node and which node-local id
!-- this process has.
    IF ( nested_run )  THEN
       CALL MPI_COMM_SPLIT_TYPE( comm_palm, MPI_COMM_TYPE_SHARED, 0,           &
                                 MPI_INFO_NULL, local_comm, ierr )
    ELSE
       CALL MPI_COMM_SPLIT_TYPE( MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, 0,      &
                                 MPI_INFO_NULL, local_comm, ierr )
    ENDIF
    CALL MPI_COMM_SIZE( local_comm, local_num_procs, ierr )
    CALL MPI_COMM_RANK( local_comm, local_id, ierr )

!
!-- This loop including the barrier is a workaround for PGI compiler versions
!-- up to and including 18.4. Later releases are able to select their GPUs in
!-- parallel, without running into spurious errors.
    DO i = 0, local_num_procs-1
       CALL MPI_BARRIER( local_comm, ierr )

       IF ( i == local_id )  THEN
          device_type = acc_get_device_type()
          num_devices = acc_get_num_devices( device_type )
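!
!--       Distribute the node-local processes cyclically over the visible
!--       devices (e.g. with 4 devices per node, local ids 0,1,2,3,4,5,...
!--       are mapped to devices 0,1,2,3,0,1,...)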
          my_device = MOD( local_id, num_devices )
          CALL acc_set_device_num( my_device, device_type )
       ENDIF
    ENDDO

    CALL MPI_COMM_FREE( local_comm, ierr )
#endif
#endif

!
!-- Initialize the measurement of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring.
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
    WRITE (myid_char,'(''_'',I6.6)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
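!
!-- (one debug file per PE, named DEBUG<coupling_char>_NNNNNN with the PE id
!-- zero-padded to six digits)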

!
!-- Initialize dvrp logging. One PE may be split off from the global
!-- communicator to do the dvrp output. In that case, the number of PEs
!-- available for PALM is reduced by one and the communicator comm_palm is
!-- changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
#endif

!
!-- Read control parameters from NAMELIST files and read environment variables
    CALL parin

!
!-- Check for the user's interface version
    IF ( user_interface_current_revision /= user_interface_required_revision )  &
    THEN
       message_string = 'current user-interface revision "' //                  &
                        TRIM( user_interface_current_revision ) // '" does ' // &
                        'not match the required revision ' //                   &
                        TRIM( user_interface_required_revision )
       CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
    ENDIF

!
!-- Determine the processor topology and local array indices
    CALL init_pegrid
!
!-- Check if an input file conforming to the input-data standard exists
    CALL netcdf_data_input_inquire_file
!
!-- Read topography input data if required. This must be done before the
!-- numerical grid is finally created in init_grid
    CALL netcdf_data_input_topo
!
!-- Generate grid parameters, initialize generic topography and further process
!-- topography information if required
    CALL init_grid
!
!-- Initialize boundary conditions and numerics such as the multigrid solver or
!-- the advection routine
    CALL module_interface_init_numerics
!
!-- Read global attributes if available.
    CALL netcdf_data_input_init
!
!-- Read surface classification data, e.g. vegetation and soil types, water
!-- surfaces, etc., if available. Some of these data are required before
!-- check_parameters is invoked.
    CALL netcdf_data_input_surface_data
!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

    CALL init_3d_model

    CALL module_interface_init_output

#if defined( __parallel )
!
!-- Coupling protocol setup for nested-domain runs
    IF ( nested_run )  THEN
       CALL pmci_modelconfiguration
!
!--    Receive and interpolate initial data in the children.
!--    If the model is both a child and a parent, the child initialization
!--    must be done first.
       IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
          CALL pmci_child_initialize
!
!--       Send initial condition data from the parent to the children
          CALL pmci_parent_initialize
!
!--       Exchange_horiz is needed after the nest initialization
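!--       (it updates the nbgp ghost-point layers of the prognostic arrays
!--       between neighbouring PEs)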
          IF ( child_domain )  THEN
             CALL exchange_horiz( u, nbgp )
             CALL exchange_horiz( v, nbgp )
             CALL exchange_horiz( w, nbgp )
             IF ( .NOT. neutral )  THEN
                CALL exchange_horiz( pt, nbgp )
             ENDIF
             IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
             IF ( humidity )  THEN
                CALL exchange_horiz( q, nbgp )
                IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
                   CALL exchange_horiz( qc, nbgp )
                   CALL exchange_horiz( nc, nbgp )
                ENDIF
                IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
                   CALL exchange_horiz( qr, nbgp )
                   CALL exchange_horiz( nr, nbgp )
                ENDIF
             ENDIF
             IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
          ENDIF
       ENDIF

       CALL pmcp_g_alloc_win       ! Must be called after pmci_child_initialize and pmci_parent_initialize
    ENDIF
#endif

!
!-- Output of the program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Integration of the non-atmospheric equations (land surface model, urban
!-- surface model)
    IF ( spinup )  THEN
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'start' )
       CALL time_integration_spinup
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'stop' )
    ENDIF

!
!-- Set the start time in format hh:mm:ss
    simulated_time_chr = time_to_string( time_since_reference_point )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF

    IF ( do3d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary )  THEN

       CALL cpu_log( log_point(22), 'wrd_local', 'start' )

       CALL location_message( 'writing restart data', 'start' )

       IF ( TRIM( restart_data_format_output ) == 'fortran_binary' )  THEN

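!
!--       With the Fortran binary format every PE writes its own restart file.
!--       The io_blocks/io_group mechanism below serializes these writes, so
!--       that only one group of PEs accesses the file system at a time.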
          DO  i = 0, io_blocks-1
             IF ( i == io_group )  THEN

!
!--             Open binary file
                CALL check_open( 14 )
!
!--             Write control parameters and other global variables for restart.
                IF ( myid == 0 )  CALL wrd_global
!
!--             Write processor specific flow field data for restart runs
                CALL wrd_local
!
!--             Close binary file
                CALL close_file( 14 )

             ENDIF
#if defined( __parallel )
             CALL MPI_BARRIER( comm2d, ierr )
#endif
          ENDDO

       ELSEIF ( TRIM( restart_data_format_output ) == 'mpi' )  THEN
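!
!--       With the MPI-IO format all PEs write collectively into one shared
!--       restart file, so no blocked I/O loop is needed here.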
!
!--       Open MPI-IO restart file
          CALL rd_mpi_io_open( 'write', 'BINOUT' )
!
!--       Write control parameters and other global variables for restart.
          CALL wrd_global
!
!--       Write processor specific flow field data for restart runs
          CALL wrd_local
!
!--       Close restart file
          CALL rd_mpi_io_close

       ENDIF

       CALL location_message( 'writing restart data', 'finished' )

       CALL cpu_log( log_point(22), 'wrd_local', 'stop' )

    ENDIF
!
!-- Last actions for surface output, for instantaneous and time-averaged data
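!-- (the argument selects the type: 0 = instantaneous, 1 = time-averaged)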
    CALL surface_data_output_last_action( 0 )
    CALL surface_data_output_last_action( 1 )

!
!-- If required, repeat output of the header including the required CPU time
    IF ( myid == 0 )  CALL header
!
!-- Perform module-specific last actions
    CALL cpu_log( log_point(4), 'last actions', 'start' )

    IF ( myid == 0  .AND.  agents_active )  CALL mas_last_actions  ! ToDo: move to module_interface

    CALL module_interface_last_actions

    CALL cpu_log( log_point(4), 'last actions', 'stop' )

!
!-- Close files
    CALL close_file( 0 )

!
!-- Write the run number to a file (used by palmrun to create unified cycle
!-- numbers for output files)
    IF ( myid == 0  .AND.  runnr > 0 )  THEN
       OPEN( 90, FILE='RUN_NUMBER', FORM='FORMATTED' )
       WRITE( 90, '(I4)' )  runnr
       CLOSE( 90 )
    ENDIF

!
!-- Take the final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm