source: palm/trunk/SOURCE/palm.f90 @ 4417

Last change on this file since 4417 was 4414, checked in by suehring, 4 years ago

Remove deprecated topography arrays; Move basic initialization of numerics into an extra module interface

  • Property svn:keywords set to Id
File size: 14.7 KB
!> @file palm.f90
!------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software
! Foundation, either version 3 of the License, or (at your option) any later
! version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2020 Leibniz Universitaet Hannover
!------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 4414 2020-02-19 20:16:04Z monakurppa $
! Call to module_interface_init_numerics
!
! 4400 2020-02-10 20:32:41Z suehring
! Add interface to initialize data output with dom
!
! 4360 2020-01-07 11:25:50Z suehring
! implement new palm_date_time_mod
!
! 4094 2019-07-12 09:24:21Z gronemeier
! Corrected "Former revisions" section
!
! 4039 2019-06-18 10:32:41Z suehring
! Rename subroutines in module for diagnostic quantities
!
! 4017 2019-06-06 12:16:46Z schwenkel
! new module for calculation and output of diagnostic quantities added
!
! 3885 2019-04-11 11:29:34Z kanani
! Changes related to global restructuring of location messages and introduction
! of additional debug messages
!
! 3761 2019-02-25 15:31:42Z raasch
! unused variable removed
!
! 3719 2019-02-06 13:10:18Z kanani
! Included cpu measurement for wall/soil spinup
!
! 3703 2019-01-29 16:43:53Z knoop
! Some interface calls moved to module_interface + cleanup
!
! 3648 2019-01-02 16:35:46Z suehring
! Rename subroutines for surface-data output
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
!> Large-Eddy Simulation (LES) model for atmospheric and oceanic boundary-layer
!> flows.
!> See the PALM homepage https://palm-model.org for further information.
!------------------------------------------------------------------------------!
 PROGRAM palm


    USE arrays_3d

    USE bulk_cloud_model_mod,                                                  &
        ONLY:  bulk_cloud_model, microphysics_morrison, microphysics_seifert

    USE control_parameters,                                                    &
        ONLY:  constant_diffusion, child_domain,                               &
               coupling_char, do2d_at_begin, do3d_at_begin, humidity,          &
               initializing_actions, io_blocks, io_group, message_string,      &
               neutral, passive_scalar, runnr, simulated_time_chr, spinup,     &
               time_since_reference_point, user_interface_current_revision,    &
               user_interface_required_revision, version, write_binary

    USE cpulog,                                                                &
        ONLY:  cpu_log, log_point, log_point_s, cpu_statistics

    USE diagnostic_output_quantities_mod,                                      &
        ONLY:  doq_calculate

    USE indices,                                                               &
        ONLY:  nbgp

    USE kinds

    USE module_interface,                                                      &
        ONLY:  module_interface_init_numerics,                                 &
               module_interface_init_output,                                   &
               module_interface_last_actions


    USE multi_agent_system_mod,                                                &
        ONLY:  agents_active, mas_last_actions

    USE netcdf_data_input_mod,                                                 &
        ONLY:  netcdf_data_input_inquire_file, netcdf_data_input_init,         &
               netcdf_data_input_surface_data, netcdf_data_input_topo

    USE pegrid

    USE pmc_particle_interface,                                                &
        ONLY:  pmcp_g_alloc_win

    USE pmc_interface,                                                         &
        ONLY:  nested_run, pmci_child_initialize, pmci_init,                   &
               pmci_modelconfiguration, pmci_parent_initialize

    USE surface_data_output_mod,                                               &
        ONLY:  surface_data_output_last_action

    USE write_restart_data_mod,                                                &
        ONLY:  wrd_global, wrd_local

#if defined( __parallel ) && defined( _OPENACC )
    USE openacc
#endif


    IMPLICIT NONE

!
!-- Local variables
    CHARACTER(LEN=9) ::  time_to_string  !<
    INTEGER(iwp)     ::  i               !< loop counter for blocked I/O
#if defined( __parallel ) && defined( _OPENACC )
    INTEGER(iwp)     ::  local_comm       !< local communicator (shared memory)
    INTEGER(iwp)     ::  local_num_procs  !< local number of processes
    INTEGER(iwp)     ::  local_id         !< local id
    INTEGER(acc_device_kind) ::  device_type  !< device type for OpenACC
    INTEGER(iwp)     ::  num_devices      !< number of devices visible to OpenACC
    INTEGER(iwp)     ::  my_device        !< device used by this process
#endif

    version = 'PALM 6.0'
    user_interface_required_revision = 'r3703'
#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set here, because it will be
!-- defined in init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )

!
!-- Initialize the coupling for nested-domain runs.
!-- comm_palm is the communicator which includes all PEs (MPI processes)
!-- available for this (nested) model. If it is not a nested run, comm_palm
!-- is returned as MPI_COMM_WORLD.
    CALL cpu_log( log_point_s(70), 'pmci_init', 'start' )
    CALL pmci_init( comm_palm )
    CALL cpu_log( log_point_s(70), 'pmci_init', 'stop' )
    comm2d = comm_palm
!
!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
!-- of a further communicator splitting in init_coupling, these numbers will
!-- be changed in init_pegrid).
    IF ( nested_run )  THEN

       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
       CALL MPI_COMM_RANK( comm_palm, myid, ierr )

    ELSE

       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
!
!--    Initialize PE topology in case of coupled atmosphere-ocean runs
!--    (comm_palm will be split in init_coupling)
       CALL init_coupling
    ENDIF

#ifdef _OPENACC
!
!-- Select the OpenACC device to be used by this process. To do so, find out
!-- how many processes are running on the same node and which local id this
!-- process has.
    IF ( nested_run )  THEN
       CALL MPI_COMM_SPLIT_TYPE( comm_palm, MPI_COMM_TYPE_SHARED, 0,           &
                                 MPI_INFO_NULL, local_comm, ierr )
    ELSE
       CALL MPI_COMM_SPLIT_TYPE( MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, 0,      &
                                 MPI_INFO_NULL, local_comm, ierr )
    ENDIF
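!
!-- local_comm now contains exactly those processes that can share memory with
!-- this one, i.e. typically all processes running on the same compute node.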
    CALL MPI_COMM_SIZE( local_comm, local_num_procs, ierr )
    CALL MPI_COMM_RANK( local_comm, local_id, ierr )

!
!-- This loop including the barrier is a workaround for PGI compiler versions
!-- up to and including 18.4. Later releases are able to select their GPUs in
!-- parallel, without running into spurious errors.
    DO  i = 0, local_num_procs-1
       CALL MPI_BARRIER( local_comm, ierr )

       IF ( i == local_id )  THEN
          device_type = acc_get_device_type()
          num_devices = acc_get_num_devices( device_type )
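!
!--       Assign the devices of this node to its processes in round-robin
!--       order (this assumes that all processes on a node see the same set
!--       of devices).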
          my_device = MOD( local_id, num_devices )
          CALL acc_set_device_num( my_device, device_type )
       ENDIF
    ENDDO

    CALL MPI_COMM_FREE( local_comm, ierr )
#endif
#endif

!
!-- Initialize measuring of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
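!-- (one file per MPI process, named DEBUG<coupling_char>_<process id>)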
    WRITE (myid_char,'(''_'',I6.6)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

!
!-- Initialize dvrp logging. Also, one PE may be split from the global
!-- communicator for doing the dvrp output. In that case, the number of
!-- PEs available for PALM is reduced by one and communicator comm_palm
!-- is changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
#endif

!
!-- Read control parameters from NAMELIST files and read environment variables
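!-- (the namelist groups read by parin include, e.g., initialization_parameters
!-- and runtime_parameters)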
    CALL parin

!
!-- Check for the user's interface version
    IF ( user_interface_current_revision /= user_interface_required_revision )  &
    THEN
       message_string = 'current user-interface revision "' //                  &
                        TRIM( user_interface_current_revision ) // '" does ' // &
                        'not match the required revision ' //                   &
                        TRIM( user_interface_required_revision )
       CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
    ENDIF

!
!-- Determine processor topology and local array indices
    CALL init_pegrid
!
!-- Check if an input file according to the input-data standard exists
    CALL netcdf_data_input_inquire_file
!
!-- Read topography input data if required. This must be done before the
!-- numerical grid is finally created in init_grid.
    CALL netcdf_data_input_topo
!
!-- Generate grid parameters, initialize generic topography and further process
!-- topography information if required
    CALL init_grid
!
!-- Initialize boundary conditions and numerics such as the multigrid solver or
!-- the advection routine
    CALL module_interface_init_numerics
!
!-- Read global attributes if available
    CALL netcdf_data_input_init
!
!-- Read surface classification data, e.g. vegetation and soil types, water
!-- surfaces, etc., if available. Some of these data are required before
!-- check_parameters is invoked.
    CALL netcdf_data_input_surface_data
!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

    CALL init_3d_model

    CALL module_interface_init_output

!
!-- Coupling protocol setup for nested-domain runs
    IF ( nested_run )  THEN
       CALL pmci_modelconfiguration
!
!--    Receive and interpolate initial data on children. If the model is both
!--    child and parent, the child initialization must be done first.
       IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
          CALL pmci_child_initialize
!
!--       Send initial condition data from parent to children
          CALL pmci_parent_initialize
!
!--       Exchange_horiz is needed after the nest initialization
          IF ( child_domain )  THEN
             CALL exchange_horiz( u, nbgp )
             CALL exchange_horiz( v, nbgp )
             CALL exchange_horiz( w, nbgp )
             IF ( .NOT. neutral )  THEN
                CALL exchange_horiz( pt, nbgp )
             ENDIF
             IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
             IF ( humidity )  THEN
                CALL exchange_horiz( q, nbgp )
                IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
                   CALL exchange_horiz( qc, nbgp )
                   CALL exchange_horiz( nc, nbgp )
                ENDIF
                IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
                   CALL exchange_horiz( qr, nbgp )
                   CALL exchange_horiz( nr, nbgp )
                ENDIF
             ENDIF
             IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
          ENDIF
       ENDIF

!
!--    Must be called after pmci_child_initialize and pmci_parent_initialize
       CALL pmcp_g_alloc_win
    ENDIF

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Integration of the non-atmospheric equations (land surface model, urban
!-- surface model)
    IF ( spinup )  THEN
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'start' )
       CALL time_integration_spinup
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'stop' )
    ENDIF

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( time_since_reference_point )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL doq_calculate    ! TODO: will be called twice

       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF

    IF ( do3d_at_begin )  THEN
       CALL doq_calculate    ! TODO: will be called twice

       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary )  THEN

       CALL cpu_log( log_point(22), 'wrd_local', 'start' )

       CALL location_message( 'writing restart data', 'start' )

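!
!--    The restart data are written in io_blocks passes; within each pass only
!--    the processes of the current io_group write, which limits the number of
!--    files that are open at the same time.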
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN

!
!--          Open binary file
             CALL check_open( 14 )
!
!--          Write control parameters and other global variables for restart
             IF ( myid == 0 )  CALL wrd_global
!
!--          Write processor-specific flow field data for restart runs
             CALL wrd_local
!
!--          Close binary file
             CALL close_file( 14 )

          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL location_message( 'writing restart data', 'finished' )

       CALL cpu_log( log_point(22), 'wrd_local', 'stop' )

    ENDIF
!
!-- Last actions for surface output, for instantaneous and time-averaged data
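!-- (argument 0 refers to the instantaneous, argument 1 to the time-averaged
!-- data)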
    CALL surface_data_output_last_action( 0 )
    CALL surface_data_output_last_action( 1 )

!
!-- If required, repeat output of the header, including the CPU time used by
!-- the run
    IF ( myid == 0 )  CALL header
!
!-- Perform module-specific last actions
    CALL cpu_log( log_point(4), 'last actions', 'start' )

    IF ( myid == 0  .AND.  agents_active )  CALL mas_last_actions    ! ToDo: move to module_interface

    CALL module_interface_last_actions

    CALL cpu_log( log_point(4), 'last actions', 'stop' )

!
!-- Close files
    CALL close_file( 0 )

!
!-- Write the run number to a file (used by palmrun to create unified cycle
!-- numbers for output files)
    IF ( myid == 0  .AND.  runnr > 0 )  THEN
       OPEN( 90, FILE='RUN_NUMBER', FORM='FORMATTED' )
       WRITE( 90, '(I4)' )  runnr
       CLOSE( 90 )
    ENDIF

!
!-- Take final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm