source: palm/trunk/SOURCE/palm.f90 @ 974

 PROGRAM palm

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: palm.f90 863 2012-03-26 14:28:17Z maronga $
!
! Version number changed from 3.8 to 3.8a.
!
! 849 2012-03-15 10:35:09Z raasch
! write_particles renamed lpm_write_restart_file
!
! 759 2011-09-15 13:58:31Z raasch
! Splitting of parallel I/O, cpu measurement for write_3d_binary and opening
! of unit 14 moved to here
!
! 495 2010-03-02 00:40:15Z raasch
! Particle data for restart runs are only written if write_binary=.T..
!
! 215 2008-11-18 09:54:31Z raasch
! Initialization of coupled runs modified for MPI-1 and moved to external
! subroutine init_coupling
!
! 197 2008-09-16 15:29:03Z raasch
! Workaround for getting information about the coupling mode
!
! 108 2007-08-24 15:10:38Z letzel
! Get coupling mode from environment variable, change location of debug output
!
! 75 2007-03-22 09:54:05Z raasch
! __vtk directives removed, write_particles is called only in case of particle
! advection switched on, open unit 9 for debug output,
! setting of palm version moved from modules to here
!
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.10  2006/08/04 14:53:12  raasch
! Distribution of run description header removed, call of header moved behind
! init_3d_model
!
! Revision 1.2  2001/01/25 07:15:06  raasch
! Program name changed to PALM, module test_variables removed.
! Initialization of dvrp logging as well as exit of dvrp moved to new
! subroutines init_dvrp_logging and close_dvrp (file init_dvrp.f90)
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (implementation realized using the
! Message Passing Interface (MPI)). The model can also be run on vector
! machines (less well optimized) and on workstations. Versions for the
! different types of machines are controlled via cpp-directives.
! Model runs are only possible using the ksh script mrun.
!------------------------------------------------------------------------------!
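!
!-- (For illustration: a typical mrun call is assumed to look like
!--     mrun -d example_run -h lcmuk -K parallel -X 8
!-- where -d names the run, -h selects the host configuration, -K parallel
!-- requests a parallel run and -X sets the number of PEs; the available
!-- options depend on the locally installed mrun version.)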


    USE arrays_3d
    USE constants
    USE control_parameters
    USE cpulog
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE interfaces
    USE model_1d
    USE particle_attributes
    USE pegrid
    USE spectrum
    USE statistics

    IMPLICIT NONE

!
!-- Local variables
    CHARACTER (LEN=9) ::  time_to_string  ! external function (time as hh:mm:ss)
    CHARACTER (LEN=1) ::  cdum            ! dummy character variable
    INTEGER           ::  i, run_description_header_i(80)

    version = 'PALM 3.8a'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminarily set, because it will be
!-- defined in init_pegrid but is already used before that in cpu_log.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
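!-- (Note: init_coupling is assumed to split MPI_COMM_WORLD into separate
!-- atmosphere and ocean communicators for MPI-1 coupled runs and to set the
!-- coupling variables accordingly; see init_coupling.f90 for the actual
!-- implementation.)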
    CALL init_coupling
#endif

!
!-- Initialize the measurement of the CPU time remaining for the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output
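!-- (Example: on PE 3 of an uncoupled run the statements below open the file
!-- DEBUG_0003, since myid_char evaluates to '_0003' and coupling_char is
!-- empty; for coupled runs coupling_char is assumed to add a suffix that
!-- keeps the atmosphere and ocean files apart.)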
    WRITE (myid_char,'(''_'',I4.4)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

!
!-- Initialize dvrp logging. Also, one PE may be split off from the global
!-- communicator for doing the dvrp output. In that case, the number of PEs
!-- available for PALM is reduced by one and the communicator comm_palm is
!-- changed accordingly.
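!-- (For illustration only: such a split is typically done with
!-- MPI_COMM_SPLIT, e.g.
!--    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, color, myid, comm_palm, ierr )
!-- where color separates the dvrp PE from the compute PEs; the actual call
!-- is assumed to be made inside the dvrp initialization routines.)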
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
       PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
               TRIM( coupling_mode )
    ENDIF
#endif

    CALL init_dvrp_logging

!
!-- Read control parameters from NAMELIST files and read environment variables
    CALL parin

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
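!-- (e.g. a simulated_time of 3661.0 s is assumed to be returned as the
!-- string '01:01:01' by the external function time_to_string)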
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the timestep scheme
    CALL time_integration

!
!-- If required, write binary data for restart runs
    IF ( write_binary(1:4) == 'true' )  THEN

       CALL cpu_log( log_point(22), 'write_3d_binary', 'start' )

       CALL check_open( 14 )

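!
!--    Binary output is serialized over the I/O groups: in each pass of the
!--    loop below only those PEs whose io_group matches the loop index write
!--    their data, while the MPI barrier holds all other PEs back. This
!--    limits the number of PEs that access the file system at the same time.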
       DO  i = 0, io_blocks-1
          IF ( i == io_group )  THEN
!
!--          Write flow field data
             CALL write_3d_binary
          ENDIF
#if defined( __parallel )
          CALL MPI_BARRIER( comm2d, ierr )
#endif
       ENDDO

       CALL cpu_log( log_point(22), 'write_3d_binary', 'stop' )

!
!--    If required, write particle data
       IF ( particle_advection )  CALL lpm_write_restart_file
    ENDIF

!
!-- If required, repeat output of header including the required CPU time
    IF ( myid == 0 )  CALL header
!
!-- If required, carry out final user-defined actions and last actions on the
!-- open files, then close the files. Unit 14 was opened in write_3d_binary
!-- but is closed here, to allow writing to this unit in routine
!-- user_last_actions.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
          CALL user_last_actions
          IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( comm2d, ierr )
#endif
    ENDDO
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via intercommunicator in case of an MPI-2 coupling
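!-- (Each atmosphere PE sends the value 12345+myid to the ocean PE of the
!-- same rank, using tag 11 on the intercommunicator comm_inter; the
!-- receiving PE prints the value, giving a quick check that the rank
!-- pairing between the two models works.)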
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

!
!-- Take final CPU time for the CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm