!> @file palm.f90
!--------------------------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
! Public License as published by the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
! Public License for more details.
!
! You should have received a copy of the GNU General Public License along with PALM. If not, see
! <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2021 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 4828 2021-01-05 11:21:41Z raasch $
! file re-formatted to follow the PALM coding standard
!
! 4539 2020-05-18 14:05:17Z raasch
! log point name changed
!
! 4535 2020-05-15 12:07:23Z raasch
! bugfix for restart data format query
!
! 4496 2020-04-15 08:37:26Z raasch
! bugfix: coupling character added to restart output filename
!
! 4495 2020-04-13 20:11:20Z raasch
! restart data handling with MPI-IO added
!
! 4457 2020-03-11 14:20:43Z raasch
! use statement for exchange horiz added
!
! 4444 2020-03-05 15:59:50Z raasch
! bugfix: cpp-directives for serial mode added
!
! 4414 2020-02-19 20:16:04Z suehring
! Call to module_interface_init_numerics
!
! 4400 2020-02-10 20:32:41Z suehring
! Add interface to initialize data output with dom
!
! 4360 2020-01-07 11:25:50Z suehring
! implement new palm_date_time_mod
!
! 4094 2019-07-12 09:24:21Z gronemeier
! Corrected "Former revisions" section
!
! 4039 2019-06-18 10:32:41Z suehring
! Rename subroutines in module for diagnostic quantities
!
! 4017 2019-06-06 12:16:46Z schwenkel
! new module for calculation and output of diagnostic quantities added
!
! 3885 2019-04-11 11:29:34Z kanani
! Changes related to global restructuring of location messages and introduction
! of additional debug messages
!
! 3761 2019-02-25 15:31:42Z raasch
! unused variable removed
!
! 3719 2019-02-06 13:10:18Z kanani
! Included cpu measurement for wall/soil spinup
!
! 3703 2019-01-29 16:43:53Z knoop
! Some interface calls moved to module_interface + cleanup
!
! 3648 2019-01-02 16:35:46Z suehring
! Rename subroutines for surface-data output
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
!> Large-Eddy Simulation (LES) model for atmospheric and oceanic boundary-layer flows,
!> see the PALM homepage https://palm-model.org for further information
!--------------------------------------------------------------------------------------------------!
 PROGRAM palm


    USE arrays_3d

#if defined( __parallel )
    USE bulk_cloud_model_mod,                                                                      &
        ONLY:  bulk_cloud_model, microphysics_morrison, microphysics_seifert
#endif

    USE control_parameters,                                                                        &
        ONLY:  coupling_char, do2d_at_begin, do3d_at_begin, io_blocks, io_group, message_string,   &
               restart_data_format_output, runnr, simulated_time_chr, spinup,                      &
               time_since_reference_point, user_interface_current_revision,                        &
               user_interface_required_revision, version, write_binary

#if defined( __parallel )
    USE control_parameters,                                                                        &
        ONLY:  child_domain, constant_diffusion, humidity, initializing_actions, neutral,          &
               passive_scalar
#endif

    USE cpulog,                                                                                    &
        ONLY:  cpu_log, cpu_statistics, log_point

#if defined( __parallel )
    USE cpulog,                                                                                    &
        ONLY:  log_point_s
#endif

    USE diagnostic_output_quantities_mod,                                                          &
        ONLY:  doq_calculate

#if defined( __parallel )
    USE exchange_horiz_mod,                                                                        &
        ONLY:  exchange_horiz

    USE indices,                                                                                   &
        ONLY:  nbgp
#endif

    USE kinds

    USE module_interface,                                                                          &
        ONLY:  module_interface_init_numerics,                                                     &
               module_interface_init_output,                                                       &
               module_interface_last_actions


    USE multi_agent_system_mod,                                                                    &
        ONLY:  agents_active, mas_last_actions

    USE netcdf_data_input_mod,                                                                     &
        ONLY:  netcdf_data_input_inquire_file, netcdf_data_input_init,                             &
               netcdf_data_input_surface_data, netcdf_data_input_topo

    USE pegrid

#if defined( __parallel )
    USE pmc_particle_interface,                                                                    &
        ONLY:  pmcp_g_alloc_win

    USE pmc_interface,                                                                             &
        ONLY:  nested_run, pmci_child_initialize, pmci_init, pmci_modelconfiguration,              &
               pmci_parent_initialize
#endif

    USE restart_data_mpi_io_mod,                                                                   &
        ONLY:  rd_mpi_io_close, rd_mpi_io_open

    USE surface_data_output_mod,                                                                   &
        ONLY:  surface_data_output_last_action

    USE write_restart_data_mod,                                                                    &
        ONLY:  wrd_global, wrd_local

#if defined( __parallel )  &&  defined( _OPENACC )
    USE openacc
#endif


    IMPLICIT NONE

!
!-- Local variables
    CHARACTER(LEN=9) ::  time_to_string  !< function converting a time in seconds into an hh:mm:ss string

    INTEGER(iwp)     ::  i                   !< loop counter for blocked I/O
#if defined( __parallel )  &&  defined( _OPENACC )
    INTEGER(acc_device_kind) ::  device_type      !< device type for OpenACC
    INTEGER(iwp)             ::  local_comm       !< local communicator (shared memory)
    INTEGER(iwp)             ::  local_num_procs  !< local number of processes
    INTEGER(iwp)             ::  local_id         !< local id
    INTEGER(iwp)             ::  num_devices      !< number of devices visible to OpenACC
    INTEGER(iwp)             ::  my_device        !< device used by this process
#endif

    version = 'PALM 6.0'
    user_interface_required_revision = 'r4495'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set, because it will be defined in init_pegrid but
!-- is already used before that in cpu_log.
    CALL MPI_INIT( ierr )

!
!-- Initialize the coupling for nested-domain runs. comm_palm is the communicator which includes
!-- all PEs (MPI processes) available for this (nested) model. If it is not a nested run, comm_palm
!-- is returned as MPI_COMM_WORLD.
    CALL cpu_log( log_point_s(70), 'pmci_init', 'start' )
    CALL pmci_init( comm_palm )
    CALL cpu_log( log_point_s(70), 'pmci_init', 'stop' )
    comm2d = comm_palm
!
!-- Get the (preliminary) number of MPI processes and the local PE-id (in case of a further
!-- communicator splitting in init_coupling, these numbers will be changed in init_pegrid).
    IF ( nested_run )  THEN

       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
       CALL MPI_COMM_RANK( comm_palm, myid, ierr )

    ELSE

       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
!
!--    Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm will be split
!--    in init_coupling)
       CALL init_coupling
    ENDIF

#ifdef _OPENACC
!
!-- Select the OpenACC device to be used by this process. For this, find out how many processes are
!-- running on the same node and which rank this process has among them.
    IF ( nested_run )  THEN
       CALL MPI_COMM_SPLIT_TYPE( comm_palm, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL, local_comm,    &
                                 ierr )
    ELSE
       CALL MPI_COMM_SPLIT_TYPE( MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL,           &
                                 local_comm, ierr )
    ENDIF
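!
!-- Determine the number of processes on this node and the rank of this process among them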
    CALL MPI_COMM_SIZE( local_comm, local_num_procs, ierr )
    CALL MPI_COMM_RANK( local_comm, local_id, ierr )

!
!-- This loop including the barrier is a workaround for PGI compiler versions up to and including
!-- 18.4. Later releases are able to select their GPUs in parallel, without running into spurious
!-- errors.
    DO  i = 0, local_num_procs-1
       CALL MPI_BARRIER( local_comm, ierr )

       IF ( i == local_id )  THEN
          device_type = acc_get_device_type()
          num_devices = acc_get_num_devices( device_type )
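!
!--       Assign the devices round-robin among the processes on this node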
          my_device = MOD( local_id, num_devices )
          CALL acc_set_device_num( my_device, device_type )
       ENDIF
    ENDDO

    CALL MPI_COMM_FREE( local_comm, ierr )
#endif
#endif

!
!-- Initialize measuring of the CPU-time remaining to the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring.
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
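!-- (one file per MPI rank; the rank is part of the file name, e.g. DEBUG_000005 for rank 5)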
    WRITE (myid_char,'(''_'',I6.6)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

!
!-- Initialize dvrp logging. Also, one PE may be split from the global communicator for doing the
!-- dvrp output. In that case, the number of PEs available for PALM is reduced by one and the
!-- communicator comm_palm is changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
#endif

!
!-- Read control parameters from NAMELIST files and read environment-variables
    CALL parin

!
!-- Check for the user's interface version
    IF ( user_interface_current_revision /= user_interface_required_revision )  THEN
       message_string = 'current user-interface revision "' //                                     &
                        TRIM( user_interface_current_revision ) // '" does ' //                    &
                        'not match the required revision ' //                                      &
                        TRIM( user_interface_required_revision )
       CALL message( 'palm', 'PA0169', 1, 2, 0, 6, 0 )
    ENDIF

!
!-- Determine processor topology and local array indices
    CALL init_pegrid
!
!-- Check if an input file according to the input-data standard exists
    CALL netcdf_data_input_inquire_file
!
!-- Read topography input data if required. It must be available before the numerical grid is
!-- finally created in init_grid.
    CALL netcdf_data_input_topo
!
!-- Generate grid parameters, initialize generic topography and further process topography
!-- information if required.
    CALL init_grid
!
!-- Initialize boundary conditions and numerics such as the multigrid solver or the advection
!-- routine
    CALL module_interface_init_numerics
!
!-- Read global attributes if available.
    CALL netcdf_data_input_init
!
!-- Read surface classification data, e.g. vegetation and soil types, water surfaces, etc., if
!-- available. Some of these data are required before check_parameters is invoked.
    CALL netcdf_data_input_surface_data
!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

    CALL init_3d_model

    CALL module_interface_init_output

#if defined( __parallel )
!
!-- Coupling protocol setup for nested-domain runs
    IF ( nested_run )  THEN
       CALL pmci_modelconfiguration
!
!--    Receive and interpolate initial data on children.
!--    If the model is both child and parent, the child initialization must be done first.
       IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
          CALL pmci_child_initialize
!
!--       Send initial condition data from parent to children
          CALL pmci_parent_initialize
!
!--       An exchange of ghost layers (exchange_horiz) is needed after the nest initialization
          IF ( child_domain )  THEN
             CALL exchange_horiz( u, nbgp )
             CALL exchange_horiz( v, nbgp )
             CALL exchange_horiz( w, nbgp )
             IF ( .NOT. neutral )  THEN
                CALL exchange_horiz( pt, nbgp )
             ENDIF
             IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
             IF ( humidity )  THEN
                CALL exchange_horiz( q, nbgp )
                IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
                   CALL exchange_horiz( qc, nbgp )
                   CALL exchange_horiz( nc, nbgp )
                ENDIF
                IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
                   CALL exchange_horiz( qr, nbgp )
                   CALL exchange_horiz( nr, nbgp )
                ENDIF
             ENDIF
             IF ( passive_scalar )  CALL exchange_horiz( s, nbgp )
          ENDIF
       ENDIF

!
!--    pmcp_g_alloc_win must be called after pmci_child_initialize and pmci_parent_initialize
       CALL pmcp_g_alloc_win
    ENDIF
#endif

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Integration of the non-atmospheric equations (land surface model, urban surface model)
    IF ( spinup )  THEN
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'start' )
       CALL time_integration_spinup
       CALL cpu_log( log_point(41), 'wall/soil spinup', 'stop' )
    ENDIF

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( time_since_reference_point )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF

    IF ( do3d_at_begin )  THEN
       CALL doq_calculate    !TODO, will be called twice

       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary )  THEN

       CALL cpu_log( log_point(22), 'write-restart-data', 'start' )

       CALL location_message( 'writing restart data', 'start' )

       IF ( TRIM( restart_data_format_output ) == 'fortran_binary' )  THEN

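!
!--       Blocked I/O: the PEs write one io_group at a time; the barrier at the end of the loop
!--       body separates the groups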
          DO  i = 0, io_blocks-1
             IF ( i == io_group )  THEN

!
!--             Open binary file
                CALL check_open( 14 )
!
!--             Write control parameters and other global variables for restart.
                IF ( myid == 0 )  CALL wrd_global
!
!--             Write processor specific flow field data for restart runs
                CALL wrd_local
!
!--             Close binary file
                CALL close_file( 14 )

             ENDIF
#if defined( __parallel )
             CALL MPI_BARRIER( comm2d, ierr )
#endif
          ENDDO

       ELSEIF ( restart_data_format_output(1:3) == 'mpi' )  THEN
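!
!--       With the MPI-IO format, all PEs write into a single common restart file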
!
!--       Open MPI-IO restart file
          CALL rd_mpi_io_open( 'write', 'BINOUT' // TRIM( coupling_char ) )
!
!--       Write control parameters and other global variables for restart.
          CALL wrd_global
!
!--       Write processor specific flow field data for restart runs
          CALL wrd_local
!
!--       Close restart file
          CALL rd_mpi_io_close

       ENDIF

       CALL location_message( 'writing restart data', 'finished' )

       CALL cpu_log( log_point(22), 'write-restart-data', 'stop' )

    ENDIF
!
!-- Last actions for surface output, for instantaneous and time-averaged data
    CALL surface_data_output_last_action( 0 )
    CALL surface_data_output_last_action( 1 )

!
!-- If required, repeat output of header including the required CPU-time
    IF ( myid == 0 )  CALL header
!
!-- Perform module specific last actions
    CALL cpu_log( log_point(4), 'last actions', 'start' )

    IF ( myid == 0  .AND.  agents_active )  CALL mas_last_actions  ! ToDo: move to module_interface

    CALL module_interface_last_actions

    CALL cpu_log( log_point(4), 'last actions', 'stop' )

!
!-- Close files
    CALL close_file( 0 )

!
!-- Write run number to file (used by palmrun to create unified cycle numbers for output files).
    IF ( myid == 0  .AND.  runnr > 0 )  THEN
       OPEN( 90, FILE='RUN_NUMBER', FORM='FORMATTED' )
       WRITE( 90, '(I4)' )  runnr
       CLOSE( 90 )
    ENDIF

!
!-- Take final CPU-time for CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm