source: palm/trunk/SOURCE/palm.f90 @ 759

Last change on this file was revision 759, checked in by raasch on 2011-09-15

New:
---

The number of parallel I/O operations can be limited with the new mrun option -w.
(advec_particles, data_output_2d, data_output_3d, header, init_grid, init_pegrid, init_3d_model, modules, palm, parin, write_3d_binary)
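
In palm.f90 this is realized by serializing the output over io_blocks groups of PEs, as in the write_3d_binary block further below. A minimal sketch of the pattern (io_blocks, io_group and comm2d are set elsewhere; io_blocks is presumably derived from the -w value):

    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL write_3d_binary    ! only PEs of the currently active group write
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )   ! remaining PEs wait for this group
#endif
    ENDDO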

Changed:
---

The mrun option -T is now obligatory.

Errors:
---

Bugfix: No zero assignments to volume_flow_initial and volume_flow_area in
case of normal restart runs. (init_3d_model)

Bugfix: initialization of u_0 and v_0, just to avoid access of uninitialized
memory in exchange_horiz_2d, which causes respective error messages
when the Intel thread checker (inspector) is used. (production_e)

Bugfix for the ts limitation. (prandtl_fluxes)

  • Property svn:keywords set to Id
 PROGRAM palm

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
! Splitting of parallel I/O, cpu measurement for write_3d_binary and opening
! of unit 14 moved to here
!
! Former revisions:
! -----------------
! $Id: palm.f90 759 2011-09-15 13:58:31Z raasch $
!
! 495 2010-03-02 00:40:15Z raasch
! Particle data for restart runs are only written if write_binary=.T..
!
! 215 2008-11-18 09:54:31Z raasch
! Initialization of coupled runs modified for MPI-1 and moved to external
! subroutine init_coupling
!
! 197 2008-09-16 15:29:03Z raasch
! Workaround for getting information about the coupling mode
!
! 108 2007-08-24 15:10:38Z letzel
! Get coupling mode from environment variable, change location of debug output
!
! 75 2007-03-22 09:54:05Z raasch
! __vtk directives removed, write_particles is called only in case of particle
! advection switched on, open unit 9 for debug output,
! setting of palm version moved from modules to here
!
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.10  2006/08/04 14:53:12  raasch
! Distribution of run description header removed, call of header moved behind
! init_3d_model
!
! Revision 1.2  2001/01/25 07:15:06  raasch
! Program name changed to PALM, module test_variables removed.
! Initialization of dvrp logging as well as exit of dvrp moved to new
! subroutines init_dvrp_logging and close_dvrp (file init_dvrp.f90)
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (implementation realized using the
! Message Passing Interface (MPI)). The model can also be run on vector machines
! (less well optimized) and workstations. Versions for the different types of
! machines are controlled via cpp-directives.
! Model runs are only feasible using the ksh-script mrun.
!------------------------------------------------------------------------------!


    USE arrays_3d
    USE constants
    USE control_parameters
    USE cpulog
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE interfaces
    USE model_1d
    USE particle_attributes
    USE pegrid
    USE spectrum
    USE statistics

    IMPLICIT NONE

!
!-- Local variables
    CHARACTER (LEN=9) ::  time_to_string
    CHARACTER (LEN=1) ::  cdum
    INTEGER           ::  i, run_description_header_i(80)
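!
!-- Note: time_to_string is an external function (hence the CHARACTER
!-- declaration above); it returns the given time formatted as hh:mm:ss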

    version = 'PALM 3.8'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set, because it will be
!-- redefined in init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
    CALL init_coupling
#endif

!
!-- Initialize measurement of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU-time measuring
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
    WRITE (myid_char,'(''_'',I4.4)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
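!
!-- (the I4.4 format yields a four-digit rank suffix, e.g. '_0003', so each
!-- PE writes to its own DEBUG file)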

!
!-- Initialize dvrp logging. Also, one PE may be split off from the global
!-- communicator for doing the dvrp output. In that case, the number of
!-- PEs available for PALM is reduced by one and the communicator comm_palm
!-- is changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
               TRIM( coupling_mode )
    ENDIF
#endif

    CALL init_dvrp_logging

!
!-- Read control parameters from NAMELIST files and read environment variables
    CALL parin

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters


!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary(1:4) == 'true' )  THEN

       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )

       CALL check_open( 14 )

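!
!--    Output is serialized over io_blocks groups of PEs (the number of
!--    parallel I/O operations can be limited via the new mrun option -w);
!--    only PEs whose io_group matches the loop index write at a time, the
!--    others wait at the barrier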
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN
!
!--          Write flow field data
             CALL write_3d_binary
          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )

!
!--    If required, write particle data
       IF ( particle_advection )  CALL write_particles
    ENDIF

!
!-- If required, repeat output of the header, including the required CPU time
    IF ( myid == 0 )  CALL header
!
!-- If required, carry out final user-defined actions and last actions on the
!-- open files, then close the files. Unit 14 was opened before the call of
!-- write_3d_binary (see check_open above) but is closed here, to allow
!-- writing on this unit in routine user_last_actions.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
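!
!-- The same serialization over io_blocks groups as for write_3d_binary is
!-- applied here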
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL user_last_actions
          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )
#endif
    ENDDO
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via the intercommunicator in case of an MPI-2 coupling
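!-- (point-to-point ranks in an intercommunicator address the remote group,
!-- i.e. each PE exchanges with the remote PE of the same rank)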
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

!
!-- Take the final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm