source: palm/trunk/SOURCE/parin.f90 @ 997

Last change on this file since 997 was 997, checked in by raasch, 12 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 16.1 KB
Line 
1 SUBROUTINE parin
2
3!------------------------------------------------------------------------------!
4! Current revisions:
5! -----------------
6!
7!
8! Former revisions:
9! -----------------
10! $Id: parin.f90 997 2012-09-07 10:53:03Z raasch $
11!
12! 996 2012-09-07 10:41:47Z raasch
13! -use_prior_plot1d_parameters
14!
15! 978 2012-08-09 08:28:32Z fricke
16! -km_damp_max, outflow_damping_width
17! +pt_damping_factor, pt_damping_width
18! +z0h_factor
19!
20! 964 2012-07-26 09:14:24Z raasch
21! -cross_normalized_x, cross_normalized_y, cross_xtext, z_max_do1d,
22! z_max_do1d_normalized
23!
24! 940 2012-07-09 14:31:00Z raasch
25! +neutral in inipar
26!
27! 927 2012-06-06 19:15:04Z raasch
28! +masking_method in inipar
29!
30! 824 2012-02-17 09:09:57Z raasch
31! +curvature_solution_effects in inipar
32!
33! 809 2012-01-30 13:32:58Z maronga
34! Bugfix: replaced .AND. and .NOT. with && and ! in the preprocessor directives
35!
36! 807 2012-01-25 11:53:51Z maronga
37! New cpp directive "__check" implemented which is used by check_namelist_files
38!
39! 785 2011-11-28 09:47:19Z raasch
40! +scalar_rayleigh_damping in inipar
41!
42! 767 2011-10-14 06:39:12Z raasch
43! +u_profile, v_profile, uv_heights in inipar
44!
45! 759 2011-09-15 13:58:31Z raasch
46! +maximum_parallel_io_streams in envpar,
47! splitting of parallel I/O in blocks of PEs
48!
49! 683 2011-02-09 14:25:15Z raasch
50! +synchronous_exchange in d3par
51!
52! 667 2010-12-23 12:06:00Z suehring/gryschka
53! Steering parameter dissipation_control added in inipar. (commented out)
54!
55! 622 2010-12-10 08:08:13Z raasch
56! +collective_wait in inipar
57!
58! 600 2010-11-24 16:10:51Z raasch
59! parameters moved from d3par to inipar: call_psolver_at_all_substeps,
60! cfl_factor, cycle_mg, mg_cycles, mg_switch_to_pe0_level, ngsrb, nsor,
61! omega_sor, prandtl_number, psolver, rayleigh_damping_factor,
62! rayleigh_damping_height, residual_limit
63!
64! 580 2010-10-05 13:59:11Z heinze
65! Renaming of ws_vertical_gradient to subs_vertical_gradient and
66! ws_vertical_gradient_level to subs_vertical_gradient_level
67!
68! 553 2010-09-01 14:09:06Z weinreis
69! parameters for masked output are replaced by arrays
70!
71! 493 2010-03-01 08:30:24Z raasch
72! +netcdf_data_format in d3par, -netcdf_64bit, -netcdf_64bit_3d
73!
74! 449 2010-02-02 11:23:59Z raasch
75! +wall_humidityflux, wall_scalarflux
76! +ws_vertical_gradient, ws_vertical_gradient_level
77!
78! 410 2009-12-04 17:05:40Z letzel
79! masked data output: + dt_domask, mask_01~20_x|y|z, mask_01~20_x|y|z_loop,
80! mask_scale_x|y|z, masks, skip_time_domask
81!
82! 291 2009-04-16 12:07:26Z raasch
83! +local_dvrserver_running in envpar
84! Output of messages replaced by message handling routine.
85! +canyon_height, canyon_width_x, canyon_width_y, canyon_wall_left,
86! canyon_wall_south, conserve_volume_flow_mode, coupling_start_time,
87! dp_external, dp_level_b, dp_smooth, dpdxy, u_bulk, v_bulk in inipar
88! topography_grid_convention moved from userpar
89!
90! 197 2008-09-16 15:29:03Z raasch
91! +cthf,leaf_surface_concentration, scalar_exchange_coefficient
92! +inflow_damping_height, inflow_damping_width, recycling_width,
93! turbulent_inflow in inipar, -skip_time_dosp in d3par,
94! allocation of hom_sum moved from init_3d_model to here,
95! npex, npey moved from inipar to d3par, setting of myid_char_14 removed,
96! lad is allways allocated
97!
98! 138 2007-11-28 10:03:58Z letzel
99! +canopy_mode, drag_coefficient, lad_surface, lad_vertical_gradient,
100! lad_vertical_gradient_level, pch_index, plant_canopy,
101! +allocation of leaf area density field
102!
103! 108 2007-08-24 15:10:38Z letzel
104! +e_init, top_momentumflux_u|v in inipar, +dt_coupling in d3par
105!
106! 95 2007-06-02 16:48:38Z raasch
107! +bc_sa_t, bottom_salinityflux, ocean, sa_surface, sa_vertical_gradient,
108! sa_vertical_gradient_level, top_salinityflux in inipar,
109! sa_init is allocated
110!
111! 87 2007-05-22 15:46:47Z raasch
112! Size of hom increased by the maximum number of user-defined profiles,
113! var_hom renamed pr_palm
114!
115! 82 2007-04-16 15:40:52Z raasch
116! +return_addres, return_username in envpar
117!
118! 75 2007-03-22 09:54:05Z raasch
119! +dt_max, netcdf_64bit_3d, precipitation_amount_interval in d3par,
120! +loop_optimization, pt_reference in inipar, -data_output_ts,
121! moisture renamed humidity
122!
123! 20 2007-02-26 00:12:32Z raasch
124! +top_heatflux, use_top_fluxes in inipar
125!
126! 3 2007-02-13 11:30:58Z raasch
127! +netcdf_64bit_3d in d3par,
128! RCS Log replace by Id keyword, revision history cleaned up
129!
130! Revision 1.57  2007/02/11 13:11:22  raasch
131! Values of environment variables are now read from file ENVPAR instead of
132! reading them with a system call, + NAMELIST envpar
133!
134! Revision 1.1  1997/07/24 11:22:50  raasch
135! Initial revision
136!
137!
138! Description:
139! ------------
140! This subroutine reads variables controlling the run from the NAMELIST files:
! the environment parameters from file ENVPAR (unit 90, namelist &envpar) and
! the model parameters from the job's parameter file (unit 11, namelists
! &inipar and &d3par). Parallel reading is organized in groups of PEs, and
! the basic profile arrays needed later by init_pegrid / check_parameters
! are allocated here for non-restart runs.
141!------------------------------------------------------------------------------!
142
143    USE arrays_3d
144    USE averaging
145    USE cloud_parameters
146    USE control_parameters
147    USE dvrp_variables
148    USE grid_variables
149    USE indices
150    USE model_1d
151    USE pegrid
152    USE profil_parameter
153    USE statistics
154
155    IMPLICIT NONE

!
!-- i    : loop index over the I/O groups
!-- idum : NOTE(review): declared but not referenced anywhere in this routine
157    INTEGER ::  i, idum
158
159

!
!-- All steering parameters that may be given in namelist &inipar of the
!-- parameter file (model initialization / setup parameters)
160    NAMELIST /inipar/  adjust_mixing_length, alpha_surface, bc_e_b, bc_lr, &
161                       bc_ns, bc_p_b, bc_p_t, bc_pt_b, bc_pt_t, bc_q_b, &
162             bc_q_t,bc_s_b, bc_s_t, bc_sa_t, bc_uv_b, bc_uv_t, &
163             bottom_salinityflux, building_height, building_length_x, &
164             building_length_y, building_wall_left, building_wall_south, &
165             call_psolver_at_all_substeps, canopy_mode, canyon_height, &
166             canyon_width_x, canyon_width_y, canyon_wall_left, &
167             canyon_wall_south, cfl_factor, cloud_droplets, cloud_physics, &
168             collective_wait, conserve_volume_flow, conserve_volume_flow_mode, &
169             coupling_start_time, cthf, curvature_solution_effects, &
170             cut_spline_overshoot, &
171             cycle_mg, damp_level_1d, dissipation_1d, & !dissipation_control, &
172             dp_external, dp_level_b, dp_smooth, dpdxy, drag_coefficient, &
173             dt, dt_pr_1d, dt_run_control_1d, dx, dy, dz, dz_max, & 
174             dz_stretch_factor, dz_stretch_level, e_init, e_min, end_time_1d, &
175             fft_method, galilei_transformation, grid_matching, humidity, &
176             inflow_damping_height, inflow_damping_width, &
177             inflow_disturbance_begin, inflow_disturbance_end, &
178             initializing_actions, km_constant, lad_surface, &
179             lad_vertical_gradient, lad_vertical_gradient_level, &
180             leaf_surface_concentration, long_filter_factor, &
181             loop_optimization, masking_method, mg_cycles, &
182             mg_switch_to_pe0_level, mixing_length_1d, momentum_advec, &
183             netcdf_precision, neutral, ngsrb, nsor, &
184             nsor_ini, nx, ny, nz, ocean, omega, omega_sor, &
185             overshoot_limit_e, overshoot_limit_pt, &
186             overshoot_limit_u, overshoot_limit_v, overshoot_limit_w, &
187             passive_scalar, pch_index, phi, plant_canopy, prandtl_layer, &
188             prandtl_number, precipitation, psolver, pt_damping_factor, &
189             pt_damping_width, pt_reference, pt_surface, &
190             pt_surface_initial_change, pt_vertical_gradient, &
191             pt_vertical_gradient_level, q_surface, q_surface_initial_change, &
192             q_vertical_gradient, q_vertical_gradient_level, radiation, &
193             random_generator, random_heatflux, rayleigh_damping_factor, &
194             rayleigh_damping_height, recycling_width, residual_limit, &
195             rif_max, rif_min, roughness_length, sa_surface, &
196             sa_vertical_gradient, sa_vertical_gradient_level, scalar_advec, &
197             scalar_exchange_coefficient, scalar_rayleigh_damping, &
198             statistic_regions, subs_vertical_gradient, &
199             subs_vertical_gradient_level, surface_heatflux, surface_pressure, &
200             surface_scalarflux, surface_waterflux, s_surface, &
201             s_surface_initial_change, s_vertical_gradient, &
202             s_vertical_gradient_level, timestep_scheme, &
203             topography, topography_grid_convention, top_heatflux, &
204             top_momentumflux_u, top_momentumflux_v, top_salinityflux, &
205             turbulent_inflow, ug_surface, ug_vertical_gradient, &
206             ug_vertical_gradient_level, ups_limit_e, ups_limit_pt, &
207             ups_limit_u, ups_limit_v, ups_limit_w, use_surface_fluxes, &
208             use_top_fluxes, use_ug_for_galilei_tr, use_upstream_for_tke, &
209             uv_heights, u_bulk, u_profile, vg_surface, vg_vertical_gradient, &
210             vg_vertical_gradient_level, v_bulk, v_profile, wall_adjustment, &
211             wall_heatflux, wall_humidityflux, wall_scalarflux, z0h_factor
212
213

!
!-- Runtime parameters (namelist &d3par); this namelist may be omitted in the
!-- parameter file, in which case default values are used
214    NAMELIST /d3par/  averaging_interval, averaging_interval_pr, &
215             create_disturbances, &
216             cross_profiles, cross_ts_uymax, cross_ts_uymin, &
217             data_output, data_output_format, data_output_masks, &
218             data_output_pr, data_output_2d_on_each_pe, disturbance_amplitude, &
219             disturbance_energy_limit, disturbance_level_b, &
220             disturbance_level_t, do2d_at_begin, do3d_at_begin, do3d_compress, &
221             do3d_comp_prec, dt, dt_averaging_input, dt_averaging_input_pr, &
222             dt_coupling, dt_data_output, dt_data_output_av, dt_disturb, &
223             dt_domask, dt_dopr, dt_dopr_listing, dt_dots, dt_do2d_xy, &
224             dt_do2d_xz, dt_do2d_yz, dt_do3d, dt_max, dt_restart, &
225             dt_run_control,end_time, force_print_header, mask_scale_x, &
226             mask_scale_y, mask_scale_z, mask_x, mask_y, mask_z, mask_x_loop, &
227             mask_y_loop, mask_z_loop, netcdf_data_format, normalizing_region, &
228             npex, npey, nz_do3d, precipitation_amount_interval, &
229             profile_columns, profile_rows, restart_time, section_xy, &
230             section_xz, section_yz, skip_time_data_output, &
231             skip_time_data_output_av, skip_time_dopr, skip_time_do2d_xy, &
232             skip_time_do2d_xz, skip_time_do2d_yz, skip_time_do3d, &
233             skip_time_domask, synchronous_exchange, termination_time_needed, &
234             z_max_do2d
235
236

!
!-- Environment parameters (namelist &envpar), read from file ENVPAR which is
!-- generated by mrun
237    NAMELIST /envpar/  host, local_dvrserver_running, maximum_cpu_time_allowed,&
238                       maximum_parallel_io_streams, revision, return_addres, &
239                       return_username, run_identifier, tasks_per_node, &
240                       write_binary
241
242!
243!-- First read values of environment variables (this NAMELIST file is
244!-- generated by mrun)
245    OPEN ( 90, FILE='ENVPAR', STATUS='OLD', FORM='FORMATTED', ERR=30 )
246    READ ( 90, envpar, ERR=31, END=32 )
247    CLOSE ( 90 )
248
249!
250!-- Calculate the number of groups into which parallel I/O is split.
251!-- The default for files which are opened by all PEs (or where each
252!-- PE opens his own independent file) is, that all PEs are doing input/output
253!-- in parallel at the same time. This might cause performance or even more
254!-- severe problems depending on the configuration of the underlying file
255!-- system.
256!-- First, set the default:
257    IF ( maximum_parallel_io_streams == -1  .OR. &
258         maximum_parallel_io_streams > numprocs )  THEN
259       maximum_parallel_io_streams = numprocs
260    ENDIF
261!
262!-- Now calculate the number of io_blocks and the io_group to which the
263!-- respective PE belongs. I/O of the groups is done in serial, but in parallel
264!-- for all PEs belonging to the same group.
265!-- These settings are repeated in init_pegrid for the communicator comm2d,
266!-- which is not available here
267    io_blocks = numprocs / maximum_parallel_io_streams
!
!-- (because of the +1 offset, the PE with myid = io_blocks-1 falls into
!-- group 0 and therefore reads first; PE0 is in group 0 only if io_blocks
!-- equals 1)
268    io_group  = MOD( myid+1, io_blocks )
269
270!
271!-- Data is read in parallel by groups of PEs
272    DO  i = 0, io_blocks-1
273       IF ( i == io_group )  THEN
274
275!
276!--       Open the NAMELIST-file which is sent with this job
277          CALL check_open( 11 )
278
279!
280!--       Read the control parameters for initialization.
281!--       The namelist "inipar" must be provided in the NAMELIST-file.
282          READ ( 11, inipar, ERR=10, END=11 )
283
284#if defined ( __check )
285!
286!--       In case of a namelist file check, &inipar from the p3d file is
287!--       used. The p3d file here must be closed and the p3df file for reading
288!--       3dpar is opened.
289          IF ( check_restart == 1 )  THEN
290             CALL close_file( 11 )
291             check_restart = 2
292             CALL check_open( 11 )             
293             initializing_actions = 'read_restart_data'
294          ENDIF
295#endif
!
!--       inipar was read successfully: skip the error-message statements
!--       at labels 10 and 11 below
296          GOTO 12
297
298 10       message_string = 'errors in \$inipar &or no \$inipar-namelist ' // &
299                           'found (CRAY-machines only)'
300          CALL message( 'parin', 'PA0271', 1, 2, 0, 6, 0 )
301
302 11       message_string = 'no \$inipar-namelist found'
303          CALL message( 'parin', 'PA0272', 1, 2, 0, 6, 0 )
304
305!
306!--       If required, read control parameters from restart file (produced by
307!--       a prior run). All PEs are reading from file created by PE0 (see
308!--       check_open)
309 12       IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
310#if ! defined ( __check )
311             CALL read_var_list
312!
313!--          The restart file will be reopened when reading the subdomain data
314             CALL close_file( 13 )
315
316!
317!--          Increment the run count
318             runnr = runnr + 1
319#endif
320          ENDIF
321
322!
323!--       Definition of names of areas used for computing statistics. They must
324!--       be defined at this place, because they are allowed to be redefined by
325!--       the user in user_parin.
326          region = 'total domain'
327
328!
329!--       Read runtime parameters given by the user for this run (namelist
330!--       "d3par"). The namelist "d3par" can be omitted. In that case, default
331!--       values are used for the parameters.
332          READ ( 11, d3par, END=20 )
333
334!
335!--       Read control parameters for optionally used model software packages
336 20       CALL package_parin
337
338!
339!--       Read user-defined variables
340          CALL user_parin
341
342!
343!--       Check in case of initial run, if the grid point numbers are well
344!--       defined and allocate some arrays which are already needed in
345!--       init_pegrid or check_parameters. During restart jobs, these arrays
346!--       will be allocated in read_var_list. All other arrays are allocated
347!--       in init_3d_model.
348          IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
349
350             IF ( nx <= 0 )  THEN
351                WRITE( message_string, * ) 'no value or wrong value given', &
352                                           ' for nx: nx=', nx
353                CALL message( 'parin', 'PA0273', 1, 2, 0, 6, 0 )
354             ENDIF
355             IF ( ny <= 0 )  THEN
356                WRITE( message_string, * ) 'no value or wrong value given', &
357                                           ' for ny: ny=', ny
358                CALL message( 'parin', 'PA0274', 1, 2, 0, 6, 0 )
359             ENDIF
360             IF ( nz <= 0 )  THEN
361                WRITE( message_string, * ) 'no value or wrong value given', &
362                                           ' for nz: nz=', nz
363                CALL message( 'parin', 'PA0275', 1, 2, 0, 6, 0 )
364             ENDIF
365!
366!--          ATTENTION: in case of changes to the following statement please
367!--                  also check the allocate statement in routine read_var_list
368             ALLOCATE( lad(0:nz+1),pt_init(0:nz+1), q_init(0:nz+1),           &
369                       sa_init(0:nz+1), ug(0:nz+1), u_init(0:nz+1),           &
370                       v_init(0:nz+1), vg(0:nz+1),                            &
371                       hom(0:nz+1,2,pr_palm+max_pr_user,0:statistic_regions), &
372                       hom_sum(0:nz+1,pr_palm+max_pr_user,0:statistic_regions) )
373
374             hom = 0.0
375
376          ENDIF
377
378!
379!--       NAMELIST-file is not needed anymore
380          CALL close_file( 11 )
381
382       ENDIF
!
!--       Synchronize all PEs, so that the next I/O group does not start
!--       reading before the current group has finished
383#if defined( __parallel ) && ! ( __check )
384       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
385#endif
386    ENDDO
387
388    RETURN
389
390 30 message_string = 'local file ENVPAR not found' // &
391                     '&some variables for steering may not be properly set'
392    CALL message( 'parin', 'PA0276', 0, 1, 0, 6, 0 )
393    RETURN
394
395 31 message_string = 'errors in local file ENVPAR' // &
396                     '&some variables for steering may not be properly set'
397    CALL message( 'parin', 'PA0277', 0, 1, 0, 6, 0 )
398    RETURN
399
400 32 message_string = 'no envpar-NAMELIST found in local file ENVPAR'  // &
401                     '&some variables for steering may not be properly set'
402    CALL message( 'parin', 'PA0278', 0, 1, 0, 6, 0 )
403
404 END SUBROUTINE parin
Note: See TracBrowser for help on using the repository browser.