source: palm/trunk/SOURCE/palm.f90 @ 1667
 PROGRAM palm

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2014 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 1667 2015-09-23 08:01:16Z raasch $
!
! 1666 2015-09-23 07:31:10Z raasch
! check for user's interface version added
!
! 1482 2014-10-18 12:34:45Z raasch
! adjustments for using CUDA-aware OpenMPI
!
! 1468 2014-09-24 14:06:57Z maronga
! Adapted for use on up to 6-digit processor cores
!
! 1402 2014-05-09 14:25:13Z raasch
! location messages added
!
! 1374 2014-04-25 12:55:07Z raasch
! bugfix: various modules added
!
! 1320 2014-03-20 08:40:49Z raasch
! ONLY-attribute added to USE-statements,
! kind-parameters added to all INTEGER and REAL declaration statements,
! kinds are defined in new module kinds,
! old module precision_kind is removed,
! revision history before 2012 removed,
! comment fields (!:) to be used for variable explanations added to
! all variable declaration statements
!
! 1318 2014-03-17 13:35:16Z raasch
! module interfaces removed
!
! 1241 2013-10-30 11:36:58Z heinze
! initialization of nudging and large scale forcing from external file
!
! 1221 2013-09-10 08:59:13Z raasch
! +wall_flags_00, rflags_invers, rflags_s_inner in copyin statement
!
! 1212 2013-08-15 08:46:27Z raasch
! +tri in copyin statement
!
! 1179 2013-06-14 05:57:58Z raasch
! ref_state added to copyin-list
!
! 1113 2013-03-10 02:48:14Z raasch
! openACC statements modified
!
! 1111 2013-03-08 23:54:10Z raasch
! openACC statements updated
!
! 1092 2013-02-02 11:24:22Z raasch
! unused variables removed
!
! 1036 2012-10-22 13:43:42Z raasch
! code put under GPL (PALM 3.9)
!
! 1015 2012-09-27 09:23:24Z raasch
! Version number changed from 3.8 to 3.8a.
! OpenACC statements added + code changes required for GPU optimization
!
! 849 2012-03-15 10:35:09Z raasch
! write_particles renamed lpm_write_restart_file
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (implementation realized using the
! Message Passing Interface (MPI)). The model can also be run on vector machines
! (less well optimized) and workstations. Versions for the different types of
! machines are controlled via cpp-directives.
! Model runs are only feasible using the ksh-script mrun.
!------------------------------------------------------------------------------!

    USE arrays_3d

    USE control_parameters,                                                    &
        ONLY:  coupling_char, coupling_mode, do2d_at_begin, do3d_at_begin,     &
               io_blocks, io_group, large_scale_forcing, message_string,       &
               nudging, simulated_time, simulated_time_chr,                    &
               user_interface_current_revision,                                &
               user_interface_required_revision, version, wall_heatflux,       &
               write_binary

    USE cpulog,                                                                &
        ONLY:  cpu_log, log_point, cpu_statistics

    USE grid_variables,                                                        &
        ONLY:  fxm, fxp, fym, fyp, fwxm, fwxp, fwym, fwyp, wall_e_x, wall_e_y, &
               wall_u, wall_v, wall_w_x, wall_w_y

    USE indices,                                                               &
        ONLY:  ngp_2dh, ngp_2dh_s_inner, nzb_diff_s_inner, nzb_diff_s_outer,   &
               nzb_diff_u, nzb_diff_v, nzb_s_inner, nzb_s_outer, nzb_u_inner,  &
               nzb_u_outer, nzb_v_inner, nzb_v_outer, nzb_w_inner,             &
               nzb_w_outer, rflags_invers, rflags_s_inner, wall_flags_0,       &
               wall_flags_00

    USE kinds

    USE ls_forcing_mod,                                                        &
        ONLY:  init_ls_forcing

    USE nudge_mod,                                                             &
        ONLY:  init_nudge

    USE particle_attributes,                                                   &
        ONLY:  particle_advection

    USE pegrid

    USE statistics,                                                            &
        ONLY:  hom, rmask, weight_pres, weight_substep

#if defined( __openacc )
    USE OPENACC
#endif

    IMPLICIT NONE

!
!-- Local variables
    CHARACTER(LEN=9)  ::  time_to_string  !: function returning simulated time as hh:mm:ss string
    CHARACTER(LEN=10) ::  env_string      !: to store string of environment var
    INTEGER(iwp)      ::  env_stat        !: to hold status of GET_ENV
    INTEGER(iwp)      ::  i               !: loop counter
    INTEGER(iwp)      ::  myid_openmpi    !: OpenMPI local rank for CUDA aware MPI
#if defined( __openacc )
    REAL(wp), DIMENSION(100) ::  acc_dum     !: dummy array used to attach the OpenACC device
#endif

    version = 'PALM 4.0'
    user_interface_required_revision = 'r1663'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set here, because it will be
!-- defined in init_pegrid but is already used in cpu_log before that.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
    CALL init_coupling
#endif

#if defined( __openacc )
!
!-- Get the local MPI rank in case of CUDA-aware OpenMPI. This is important
!-- if there is more than one accelerator board on the node.
    CALL GET_ENVIRONMENT_VARIABLE('OMPI_COMM_WORLD_LOCAL_RANK',                &
         VALUE=env_string, STATUS=env_stat )
    READ( env_string, '(I1)' )  myid_openmpi
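!
!-- Note: the '(I1)' edit descriptor parses only the first digit of
!-- env_string, i.e. local ranks 0-9 are read correctly; nodes running more
!-- than ten MPI processes would require a wider edit descriptor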
    PRINT*, '### local_rank = ', myid_openmpi, '  status=',env_stat
!
!-- Get the number of accelerator boards per node and assign the MPI processes
!-- to these boards
    PRINT*, '*** ACC_DEVICE_NVIDIA = ', ACC_DEVICE_NVIDIA
    num_acc_per_node  = ACC_GET_NUM_DEVICES( ACC_DEVICE_NVIDIA )
    IF ( numprocs == 1  .AND.  num_acc_per_node > 0 )  num_acc_per_node = 1
    PRINT*, '*** myid = ', myid_openmpi, ' num_acc_per_node = ', num_acc_per_node
    acc_rank = MOD( myid_openmpi, num_acc_per_node )
    CALL ACC_SET_DEVICE_NUM ( acc_rank, ACC_DEVICE_NVIDIA )
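!
!-- (example of the MOD mapping above: with two boards per node, local ranks
!-- 0, 2, 4, ... attach to device 0 and local ranks 1, 3, 5, ... to device 1)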
!
!-- Test output (to be removed later)
    WRITE (*,'(A,I6,A,I3,A,I3,A,I3)') '*** Connect MPI-Task ', myid_openmpi,   &
                                      ' to CPU ', acc_rank, ' Devices: ',      &
                                      num_acc_per_node, ' connected to:',      &
                                      ACC_GET_DEVICE_NUM( ACC_DEVICE_NVIDIA )
#endif

!
!-- Ensure that OpenACC first attaches the GPU devices by copying a dummy data
!-- region
    !$acc data copyin( acc_dum )

!
!-- Initialize measuring of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start measuring the total CPU time
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
    WRITE (myid_char,'(''_'',I6.6)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
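!
!-- (the I6.6 edit descriptor supports up to 6-digit processor IDs, e.g.
!-- myid = 42 yields the filename suffix '_000042')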

!
!-- Initialize dvrp logging. Also, one PE may be split off from the global
!-- communicator for doing the dvrp output. In that case, the number of PEs
!-- available for PALM is reduced by one and the communicator comm_palm is
!-- changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
               TRIM( coupling_mode )
    ENDIF
#endif

    CALL init_dvrp_logging

!
!-- Read control parameters from NAMELIST files and read environment variables
    CALL parin

!
!-- Check for the user's interface version
    IF ( user_interface_current_revision /= user_interface_required_revision )  &
    THEN
       message_string = 'current user-interface revision "' //                  &
                        TRIM( user_interface_current_revision ) // '" does ' // &
                        'not match the required revision ' //                   &
                        TRIM( user_interface_required_revision )
       CALL message( 'palm', 'PA0169', 0, 1, 0, 6, 0 )
    ENDIF

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Initialize nudging if required
    IF ( nudging )  THEN
       CALL init_nudge
    ENDIF

!
!-- Initialize reading of large-scale forcing from an external file, if
!-- required
    IF ( large_scale_forcing )  THEN
       CALL init_ls_forcing
    ENDIF

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

!
!-- Declare and initialize variables in the accelerator memory with their
!-- host values
    !$acc  data copyin( d, diss, e, e_p, kh, km, p, pt, pt_p, q, ql, tend, te_m, tpt_m, tu_m, tv_m, tw_m, u, u_p, v, vpt, v_p, w, w_p )          &
    !$acc       copyin( tri, tric, dzu, ddzu, ddzw, dd2zu, l_grid, l_wall, ptdf_x, ptdf_y, pt_init, rdf, rdf_sc, ref_state, ug, u_init, vg, v_init, zu, zw )   &
    !$acc       copyin( hom, qs, qsws, qswst, rif, rif_wall, shf, ts, tswst, us, usws, uswst, vsws, vswst, z0, z0h )      &
    !$acc       copyin( fxm, fxp, fym, fyp, fwxm, fwxp, fwym, fwyp, nzb_diff_s_inner, nzb_diff_s_outer, nzb_diff_u )       &
    !$acc       copyin( nzb_diff_v, nzb_s_inner, nzb_s_outer, nzb_u_inner )    &
    !$acc       copyin( nzb_u_outer, nzb_v_inner, nzb_v_outer, nzb_w_inner )   &
    !$acc       copyin( nzb_w_outer, rflags_invers, rflags_s_inner, rmask, wall_heatflux, wall_e_x, wall_e_y, wall_u, wall_v, wall_w_x, wall_w_y, wall_flags_0, wall_flags_00 )  &
    !$acc       copyin( ngp_2dh, ngp_2dh_s_inner )  &
    !$acc       copyin( weight_pres, weight_substep )
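!
!-- This data region stays open during the entire time integration; it is
!-- closed by the matching "!$acc end data" near the end of the program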
!
!-- Integration of the model equations using the selected timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary(1:4) == 'true' )  THEN

       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )

       CALL location_message( 'writing restart data', .FALSE. )

       CALL check_open( 14 )

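!
!--    The loop below serializes the restart I/O: in pass i only the PEs of
!--    I/O group i write their data, while the barrier makes the other PEs
!--    wait, so that only one group accesses the filesystem at a time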
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN
!
!--          Write flow field data
             CALL write_3d_binary
          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL location_message( 'finished', .TRUE. )

       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )

!
!--    If required, write particle data
       IF ( particle_advection )  CALL lpm_write_restart_file
    ENDIF

!
!-- If required, repeat output of the header, now including the required
!-- CPU time
    IF ( myid == 0 )  CALL header
!
!-- If required, carry out final user-defined actions and last actions on the
!-- open files, then close the files. Unit 14 was opened in write_3d_binary,
!-- but it is closed here to allow writing to this unit from routine
!-- user_last_actions.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL user_last_actions
          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )
#endif
    ENDDO
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via intercommunicator in case of an MPI-2 coupling
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

!
!-- Close the OpenACC data regions (the model-variable region and the dummy
!-- region opened at the beginning)
    !$acc end data
    !$acc end data

!
!-- Take the final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm