source: palm/trunk/SOURCE/palm.f90 @ 831

Last change on this file since 831 was 760, checked in by raasch, 13 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 6.9 KB
 PROGRAM palm

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 760 2011-09-15 14:37:54Z raasch $
!
! 759 2011-09-15 13:58:31Z raasch
! Splitting of parallel I/O, cpu measurement for write_3d_binary and opening
! of unit 14 moved to here
!
! 495 2010-03-02 00:40:15Z raasch
! Particle data for restart runs are only written if write_binary=.T..
!
! 215 2008-11-18 09:54:31Z raasch
! Initialization of coupled runs modified for MPI-1 and moved to external
! subroutine init_coupling
!
! 197 2008-09-16 15:29:03Z raasch
! Workaround for getting information about the coupling mode
!
! 108 2007-08-24 15:10:38Z letzel
! Get coupling mode from environment variable, change location of debug output
!
! 75 2007-03-22 09:54:05Z raasch
! __vtk directives removed, write_particles is called only in case of particle
! advection switched on, open unit 9 for debug output,
! setting of palm version moved from modules to here
!
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.10  2006/08/04 14:53:12  raasch
! Distribution of run description header removed, call of header moved behind
! init_3d_model
!
! Revision 1.2  2001/01/25 07:15:06  raasch
! Program name changed to PALM, module test_variables removed.
! Initialization of dvrp logging as well as exit of dvrp moved to new
! subroutines init_dvrp_logging and close_dvrp (file init_dvrp.f90)
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (the parallelization is implemented
! with the Message Passing Interface (MPI)). The model can also be run on
! vector machines (less well optimized) and on workstations. Versions for
! the different machine types are selected via cpp directives.
! Model runs are only feasible using the ksh script mrun.
!------------------------------------------------------------------------------!


    USE arrays_3d
    USE constants
    USE control_parameters
    USE cpulog
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE interfaces
    USE model_1d
    USE particle_attributes
    USE pegrid
    USE spectrum
    USE statistics

    IMPLICIT NONE

!
!-- Local variables
    CHARACTER (LEN=9) ::  time_to_string
    CHARACTER (LEN=1) ::  cdum
    INTEGER           ::  i, run_description_header_i(80)

    version = 'PALM 3.8'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is set preliminarily here, because it will
!-- only be properly defined in init_pegrid but is already used before that
!-- in cpu_log.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
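!-- (in a coupled atmosphere-ocean run, init_coupling is expected to assign
!-- each PE to one of the two models and to adjust comm_palm accordingly;
!-- for uncoupled runs it should leave the setup unchanged)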
    CALL init_coupling
#endif
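!
!-- (without __parallel, myid and numprocs presumably keep their serial
!-- defaults from module pegrid, i.e. a single-PE run)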

!
!-- Initialize measuring of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
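!-- (one file per PE: myid_char carries the four-digit MPI rank, so PE 0
!-- writes to DEBUG_0000 as long as coupling_char is empty)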
    WRITE (myid_char,'(''_'',I4.4)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

!
!-- Initialize dvrp logging. Also, one PE may be split off from the global
!-- communicator for doing the dvrp output. In that case, the number of
!-- PEs available for PALM is reduced by one and communicator comm_palm
!-- is changed accordingly.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
               TRIM( coupling_mode )
    ENDIF
#endif

    CALL init_dvrp_logging

!
!-- Read control parameters from NAMELIST files and from environment
!-- variables
    CALL parin

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters


!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary(1:4) == 'true' )  THEN

       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )

       CALL check_open( 14 )

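!
!--    Each PE belongs to one of io_blocks I/O groups (its group index is
!--    stored in io_group). In every pass of the loop only the PEs of the
!--    matching group write, while the barrier below holds the others back;
!--    this serializes the binary output (cf. r759, splitting of parallel
!--    I/O) and limits the number of PEs accessing the file system at once.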
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN
!
!--          Write flow field data
             CALL write_3d_binary
          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )

!
!--    If required, write particle data
       IF ( particle_advection )  CALL write_particles
    ENDIF

!
!-- If required, repeat the output of the header, now including the required
!-- CPU time
    IF ( myid == 0 )  CALL header
!
!-- If required, carry out final user-defined actions and last actions on
!-- the open files, then close the files. Unit 14 was opened for
!-- write_3d_binary but is only closed here, so that routine
!-- user_last_actions can still write to this unit.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
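!
!-- The same io_blocks/io_group scheme as for the restart output above
!-- serializes user_last_actions and the closing of unit 14.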
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL user_last_actions
          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )
#endif
    ENDDO
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via the intercommunicator in case of an MPI-2 coupling
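!-- (each atmosphere PE sends one tagged integer to the ocean PE of the
!-- same rank on the intercommunicator comm_inter, which receives and
!-- prints it; this only verifies that the intercommunicator works)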
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

!
!-- Take the final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm