source: palm/trunk/SOURCE/palm.f90 @ 1036

 PROGRAM palm

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2012  Leibniz University Hannover
!--------------------------------------------------------------------------------!
19!
20! Current revisions:
21! -----------------
22!
23!
24! Former revisions:
25! -----------------
26! $Id: palm.f90 1036 2012-10-22 13:43:42Z raasch $
27!
28! 1015 2012-09-27 09:23:24Z raasch
29! Version number changed from 3.8 to 3.8a.
30! OpenACC statements added + code changes required for GPU optimization
31!
32! 849 2012-03-15 10:35:09Z raasch
33! write_particles renamed lpm_write_restart_file
34!
35! 759 2011-09-15 13:58:31Z raasch
36! Splitting of parallel I/O, cpu measurement for write_3d_binary and opening
37! of unit 14 moved to here
38!
39! 495 2010-03-02 00:40:15Z raasch
40! Particle data for restart runs are only written if write_binary=.T..
41!
42! 215 2008-11-18 09:54:31Z raasch
43! Initialization of coupled runs modified for MPI-1 and moved to external
44! subroutine init_coupling
45!
46! 197 2008-09-16 15:29:03Z raasch
47! Workaround for getting information about the coupling mode
48!
49! 108 2007-08-24 15:10:38Z letzel
50! Get coupling mode from environment variable, change location of debug output
51!
52! 75 2007-03-22 09:54:05Z raasch
53! __vtk directives removed, write_particles is called only in case of particle
54! advection switched on, open unit 9 for debug output,
55! setting of palm version moved from modules to here
56!
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.10  2006/08/04 14:53:12  raasch
! Distribution of run description header removed, call of header moved behind
! init_3d_model
!
! Revision 1.2  2001/01/25 07:15:06  raasch
! Program name changed to PALM, module test_variables removed.
! Initialization of dvrp logging as well as exit of dvrp moved to new
! subroutines init_dvrp_logging and close_dvrp (file init_dvrp.f90)
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (implementation realized using the
! Message Passing Interface (MPI)). The model can also be run on vector machines
! (less well optimized) and workstations. Versions for the different types of
! machines are controlled via cpp-directives.
! Model runs are only feasible using the ksh-script mrun.
!------------------------------------------------------------------------------!


    USE arrays_3d
    USE constants
    USE control_parameters
    USE cpulog
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE interfaces
    USE model_1d
    USE particle_attributes
    USE pegrid
    USE spectrum
    USE statistics

#if defined( __openacc )
    USE OPENACC
#endif

    IMPLICIT NONE

!
!-- Local variables
    CHARACTER (LEN=9) ::  time_to_string
    CHARACTER (LEN=1) ::  cdum
    INTEGER           ::  i, run_description_header_i(80)
#if defined( __openacc )
    REAL, DIMENSION(100) ::  acc_dum
#endif

    version = 'PALM 3.9'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set here, because it will only
!-- be defined in init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
    CALL init_coupling
#endif

#if defined( __openacc )
!
!-- Get the number of accelerator boards per node and assign the MPI processes
!-- to these boards
    num_acc_per_node  = ACC_GET_NUM_DEVICES( ACC_DEVICE_NVIDIA )
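!
!-- Note: MOD maps consecutive MPI ranks round-robin onto the boards of a
!-- node; this assumes that ranks are placed consecutively node by node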
    acc_rank = MOD( myid, num_acc_per_node )
    CALL ACC_SET_DEVICE_NUM ( acc_rank, ACC_DEVICE_NVIDIA )
!
!-- Test output (to be removed later)
    WRITE (*,'(A,I4,A,I3,A,I3,A,I3)') '*** Connect MPI-Task ', myid,' to CPU ',&
                                      acc_rank, ' Devices: ', num_acc_per_node,&
                                      ' connected to:',                        &
                                      ACC_GET_DEVICE_NUM( ACC_DEVICE_NVIDIA )
#endif
143!
144!-- Ensure that OpenACC first attaches the GPU devices by copying a dummy data
145!-- region
146    !$acc data copyin( acc_dum )
147
148!
149!-- Initialize measuring of the CPU-time remaining to the run
150    CALL local_tremain_ini
151
152!
153!-- Start of total CPU time measuring.
154    CALL cpu_log( log_point(1), 'total', 'start' )
155    CALL cpu_log( log_point(2), 'initialisation', 'start' )
156
157!
158!-- Open a file for debug output
159    WRITE (myid_char,'(''_'',I4.4)')  myid
160    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
161
162!
163!-- Initialize dvrp logging. Also, one PE maybe split from the global
164!-- communicator for doing the dvrp output. In that case, the number of
165!-- PEs available for PALM is reduced by one and communicator comm_palm
166!-- is changed respectively.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
               TRIM( coupling_mode )
    ENDIF
#endif

    CALL init_dvrp_logging

181!
182!-- Read control parameters from NAMELIST files and read environment-variables
183    CALL parin

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters


!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

223!
224!-- Declare and initialize variables in the accelerator memory with their
225!-- host values
226    !$acc  data copyin( diss, e, e_p, kh, km, pt, pt_p, q, ql, tend, te_m, tpt_m, tu_m, tv_m, tw_m, u, u_p, v, vpt, v_p, w, w_p )          &
227    !$acc       copyin( ddzu, ddzw, dd2zu, l_grid, l_wall, ptdf_x, ptdf_y, pt_init, rdf, rdf_sc, ug, vg, zu, zw )   &
228    !$acc       copyin( hom, qs, qsws, qswst, rif, rif_wall, shf, ts, tswst, us, usws, uswst, vsws, vswst, z0, z0h )      &
229    !$acc       copyin( fxm, fxp, fym, fyp, fwxm, fwxp, fwym, fwyp, nzb_diff_s_inner, nzb_diff_s_outer, nzb_diff_u )       &
230    !$acc       copyin( nzb_diff_v, nzb_s_inner, nzb_s_outer, nzb_u_inner )    &
231    !$acc       copyin( nzb_u_outer, nzb_v_inner, nzb_v_outer, nzb_w_inner )   &
232    !$acc       copyin( nzb_w_outer, wall_heatflux, wall_e_x, wall_e_y, wall_u, wall_v, wall_w_x, wall_w_y, wall_flags_0 )
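!
!-- These fields stay resident on the device throughout the time integration;
!-- the region is closed by the first "!$acc end data" at the end of the
!-- program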
233!
234!-- Integration of the model equations using timestep-scheme
235    CALL time_integration
236
237!
238!-- If required, write binary data for restart runs
239    IF ( write_binary(1:4) == 'true' )  THEN
240
241       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )
242
243       CALL check_open( 14 )
244
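!
!--    The PEs are organized in io_blocks groups and the groups write one
!--    after another, so that only a limited number of PEs accesses the
!--    file system at the same time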
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN
!
!--          Write flow field data
             CALL write_3d_binary
          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )

!
!--    If required, write particle data
       IF ( particle_advection )  CALL lpm_write_restart_file
    ENDIF

!
!-- If required, repeat the output of the header, now including the required
!-- CPU-time
    IF ( myid == 0 )  CALL header
!
!-- If required, carry out final user-defined actions and close the open
!-- files. Unit 14 was opened in write_3d_binary but is closed here, so that
!-- routine user_last_actions can still write to this unit.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
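!
!-- The io_blocks groups are again processed one after another, as for the
!-- restart output above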
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL user_last_actions
          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )
#endif
    ENDDO
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via intercommunicator in case of an MPI-2 coupling
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

297!
298!-- Close the OpenACC dummy data region
299    !$acc end data
300    !$acc end data

!
!-- Take final CPU-time for CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm