source: palm/trunk/SOURCE/palm.f90 @ 206

Last change on this file since 206 was 206, checked in by raasch, 14 years ago

ocean-atmosphere coupling realized with MPI-1, adjustments in mrun, mbuild, subjob for lcxt4

  • Property svn:keywords set to Id
File size: 6.0 KB
Line 
PROGRAM palm

!------------------------------------------------------------------------------!
! Actual revisions:
! -----------------
! Initialization of coupled runs modified for MPI-1 and moved to external
! subroutine init_coupling
!
! Former revisions:
! -----------------
! $Id: palm.f90 206 2008-10-13 14:59:11Z raasch $
!
! 197 2008-09-16 15:29:03Z raasch
! Workaround for getting information about the coupling mode
!
! 108 2007-08-24 15:10:38Z letzel
! Get coupling mode from environment variable, change location of debug output
!
! 75 2007-03-22 09:54:05Z raasch
! __vtk directives removed, write_particles is called only in case of particle
! advection switched on, open unit 9 for debug output,
! setting of palm version moved from modules to here
!
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.10  2006/08/04 14:53:12  raasch
! Distribution of run description header removed, call of header moved behind
! init_3d_model
!
! Revision 1.2  2001/01/25 07:15:06  raasch
! Program name changed to PALM, module test_variables removed.
! Initialization of dvrp logging as well as exit of dvrp moved to new
! subroutines init_dvrp_logging and close_dvrp (file init_dvrp.f90)
!
! Revision 1.1  1997/07/24 11:23:35  raasch
! Initial revision
!
!
! Description:
! ------------
! Large-Eddy Simulation (LES) model for the convective boundary layer,
! optimized for use on parallel machines (implementation realized using the
! Message Passing Interface (MPI)). The model can also be run on vector machines
! (less well optimized) and workstations. Versions for the different types of
! machines are controlled via cpp-directives.
! Model runs are only feasible using the ksh-script mrun.
!------------------------------------------------------------------------------!


    USE arrays_3d
    USE constants
    USE control_parameters
    USE cpulog
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE interfaces
    USE model_1d
    USE particle_attributes
    USE pegrid
    USE spectrum
    USE statistics

    IMPLICIT NONE

!
!-- Local variables
!-- (time_to_string is an external function returning the time as "hh:mm:ss";
!-- i is used only for the MPI-2 intercommunicator handshake test below)
    CHARACTER (LEN=9) ::  time_to_string
    INTEGER           ::  i

    version = 'PALM 3.5'

#if defined( __parallel )
!
!-- MPI initialisation. comm2d is preliminary set, because
!-- it will be defined in init_pegrid but is used before in cpu_log.
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    comm_palm = MPI_COMM_WORLD
    comm2d    = MPI_COMM_WORLD

!
!-- Initialize PE topology in case of coupled runs
    CALL init_coupling
#endif

!
!-- Initialize measuring of the CPU-time remaining to the run
    CALL local_tremain_ini

!
!-- Start of total CPU time measuring.
    CALL cpu_log( log_point(1), 'total', 'start' )
    CALL cpu_log( log_point(2), 'initialisation', 'start' )

!
!-- Open a file for debug output (one file per PE; coupling_char
!-- distinguishes the atmosphere and ocean parts of a coupled run)
    WRITE (myid_char,'(''_'',I4.4)')  myid
    OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

!
!-- Initialize dvrp logging. Also, one PE may be split from the global
!-- communicator for doing the dvrp output. In that case, the number of
!-- PEs available for PALM is reduced by one and communicator comm_palm
!-- is changed respectively.
#if defined( __parallel )
    CALL MPI_COMM_RANK( comm_palm, myid, ierr )
!
!-- TEST OUTPUT (TO BE REMOVED)
    WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
    CALL LOCAL_FLUSH( 9 )
    PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
            TRIM( coupling_mode )
#endif

    CALL init_dvrp_logging

!
!-- Read control parameters from NAMELIST files and read environment-variables
    CALL parin

!
!-- Determine processor topology and local array indices
    CALL init_pegrid

!
!-- Generate grid parameters
    CALL init_grid

!
!-- Check control parameters and deduce further quantities
    CALL check_parameters

!
!-- Initialize all necessary variables
    CALL init_3d_model

!
!-- Output of program header
    IF ( myid == 0 )  CALL header

    CALL cpu_log( log_point(2), 'initialisation', 'stop' )

!
!-- Set start time in format hh:mm:ss
    simulated_time_chr = time_to_string( simulated_time )

!
!-- If required, output of initial arrays
    IF ( do2d_at_begin )  THEN
       CALL data_output_2d( 'xy', 0 )
       CALL data_output_2d( 'xz', 0 )
       CALL data_output_2d( 'yz', 0 )
    ENDIF
    IF ( do3d_at_begin )  THEN
       CALL data_output_3d( 0 )
    ENDIF

!
!-- Integration of the model equations using the leap-frog scheme
    CALL time_integration

!
!-- If required, write binary data for model continuation runs
    IF ( write_binary(1:4) == 'true' )  CALL write_3d_binary

!
!-- If required, write binary particle data
    IF ( particle_advection )  CALL write_particles

!
!-- If required, repeat output of header including the required CPU-time
    IF ( myid == 0 )  CALL header

!
!-- If required, final user-defined actions, and
!-- last actions on the open files and close files. Unit 14 was opened
!-- in write_3d_binary but it is closed here, to allow writing on this
!-- unit in routine user_last_actions.
    CALL cpu_log( log_point(4), 'last actions', 'start' )
    CALL user_last_actions
    IF ( write_binary(1:4) == 'true' )  CALL close_file( 14 )
    CALL close_file( 0 )
    CALL close_dvrp
    CALL cpu_log( log_point(4), 'last actions', 'stop' )

#if defined( __mpi2 )
!
!-- Test exchange via intercommunicator in case of a MPI-2 coupling
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
       i = 12345 + myid
       CALL MPI_SEND( i, 1, MPI_INTEGER, myid, 11, comm_inter, ierr )
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       CALL MPI_RECV( i, 1, MPI_INTEGER, myid, 11, comm_inter, status, ierr )
       PRINT*, '### myid: ', myid, '   received from atmosphere:  i = ', i
    ENDIF
#endif

!
!-- Take final CPU-time for CPU-time analysis
    CALL cpu_log( log_point(1), 'total', 'stop' )
    CALL cpu_statistics

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM palm
Note: See TracBrowser for help on using the repository browser.