source: palm/trunk/SOURCE/parin.f90 @ 996

Last change on this file since 996 was 996, checked in by raasch, 12 years ago

parameter use_prior_plot1d_parameters removed; little reformatting

  • Property svn:keywords set to Id
File size: 16.0 KB
 SUBROUTINE parin

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
! -use_prior_plot1d_parameters
!
! Former revisions:
! -----------------
! $Id: parin.f90 996 2012-09-07 10:41:47Z raasch $
!
! 978 2012-08-09 08:28:32Z fricke
! -km_damp_max, outflow_damping_width
! +pt_damping_factor, pt_damping_width
! +z0h_factor
!
! 964 2012-07-26 09:14:24Z raasch
! -cross_normalized_x, cross_normalized_y, cross_xtext, z_max_do1d,
! z_max_do1d_normalized
!
! 940 2012-07-09 14:31:00Z raasch
! +neutral in inipar
!
! 927 2012-06-06 19:15:04Z raasch
! +masking_method in inipar
!
! 824 2012-02-17 09:09:57Z raasch
! +curvature_solution_effects in inipar
!
! 809 2012-01-30 13:32:58Z maronga
! Bugfix: replaced .AND. and .NOT. with && and ! in the preprocessor directives
!
! 807 2012-01-25 11:53:51Z maronga
! New cpp directive "__check" implemented which is used by check_namelist_files
!
! 785 2011-11-28 09:47:19Z raasch
! +scalar_rayleigh_damping in inipar
!
! 767 2011-10-14 06:39:12Z raasch
! +u_profile, v_profile, uv_heights in inipar
!
! 759 2011-09-15 13:58:31Z raasch
! +maximum_parallel_io_streams in envpar,
! splitting of parallel I/O in blocks of PEs
!
! 683 2011-02-09 14:25:15Z raasch
! +synchronous_exchange in d3par
!
! 667 2010-12-23 12:06:00Z suehring/gryschka
! Steering parameter dissipation_control added in inipar. (commented out)
!
! 622 2010-12-10 08:08:13Z raasch
! +collective_wait in inipar
!
! 600 2010-11-24 16:10:51Z raasch
! parameters moved from d3par to inipar: call_psolver_at_all_substeps,
! cfl_factor, cycle_mg, mg_cycles, mg_switch_to_pe0_level, ngsrb, nsor,
! omega_sor, prandtl_number, psolver, rayleigh_damping_factor,
! rayleigh_damping_height, residual_limit
!
! 580 2010-10-05 13:59:11Z heinze
! Renaming of ws_vertical_gradient to subs_vertical_gradient and
! ws_vertical_gradient_level to subs_vertical_gradient_level
!
! 553 2010-09-01 14:09:06Z weinreis
! parameters for masked output are replaced by arrays
!
! 493 2010-03-01 08:30:24Z raasch
! +netcdf_data_format in d3par, -netcdf_64bit, -netcdf_64bit_3d
!
! 449 2010-02-02 11:23:59Z raasch
! +wall_humidityflux, wall_scalarflux
! +ws_vertical_gradient, ws_vertical_gradient_level
!
! 410 2009-12-04 17:05:40Z letzel
! masked data output: + dt_domask, mask_01~20_x|y|z, mask_01~20_x|y|z_loop,
! mask_scale_x|y|z, masks, skip_time_domask
!
! 291 2009-04-16 12:07:26Z raasch
! +local_dvrserver_running in envpar
! Output of messages replaced by message handling routine.
! +canyon_height, canyon_width_x, canyon_width_y, canyon_wall_left,
! canyon_wall_south, conserve_volume_flow_mode, coupling_start_time,
! dp_external, dp_level_b, dp_smooth, dpdxy, u_bulk, v_bulk in inipar
! topography_grid_convention moved from userpar
!
! 197 2008-09-16 15:29:03Z raasch
! +cthf, leaf_surface_concentration, scalar_exchange_coefficient
! +inflow_damping_height, inflow_damping_width, recycling_width,
! turbulent_inflow in inipar, -skip_time_dosp in d3par,
! allocation of hom_sum moved from init_3d_model to here,
! npex, npey moved from inipar to d3par, setting of myid_char_14 removed,
! lad is always allocated
!
! 138 2007-11-28 10:03:58Z letzel
! +canopy_mode, drag_coefficient, lad_surface, lad_vertical_gradient,
! lad_vertical_gradient_level, pch_index, plant_canopy,
! +allocation of leaf area density field
!
! 108 2007-08-24 15:10:38Z letzel
! +e_init, top_momentumflux_u|v in inipar, +dt_coupling in d3par
!
! 95 2007-06-02 16:48:38Z raasch
! +bc_sa_t, bottom_salinityflux, ocean, sa_surface, sa_vertical_gradient,
! sa_vertical_gradient_level, top_salinityflux in inipar,
! sa_init is allocated
!
! 87 2007-05-22 15:46:47Z raasch
! Size of hom increased by the maximum number of user-defined profiles,
! var_hom renamed pr_palm
!
! 82 2007-04-16 15:40:52Z raasch
! +return_addres, return_username in envpar
!
! 75 2007-03-22 09:54:05Z raasch
! +dt_max, netcdf_64bit_3d, precipitation_amount_interval in d3par,
! +loop_optimization, pt_reference in inipar, -data_output_ts,
! moisture renamed humidity
!
! 20 2007-02-26 00:12:32Z raasch
! +top_heatflux, use_top_fluxes in inipar
!
! 3 2007-02-13 11:30:58Z raasch
! +netcdf_64bit_3d in d3par,
! RCS Log replaced by Id keyword, revision history cleaned up
!
! Revision 1.57  2007/02/11 13:11:22  raasch
! Values of environment variables are now read from file ENVPAR instead of
! reading them with a system call, + NAMELIST envpar
!
! Revision 1.1  1997/07/24 11:22:50  raasch
! Initial revision
!
!
! Description:
! ------------
! This subroutine reads the variables controlling the run from the NAMELIST
! files
!------------------------------------------------------------------------------!
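!
! For orientation, a minimal sketch of such a NAMELIST file is given below.
! The group names match the declarations further down in this routine; the
! parameter values are purely illustrative assumptions, not recommended
! settings:
!
!    &inipar  nx = 39, ny = 39, nz = 40,
!             dx = 50.0, dy = 50.0, dz = 50.0,
!             initializing_actions = 'set_constant_profiles'  /
!
!    &d3par   end_time = 3600.0, dt_run_control = 60.0  /
!
! The &inipar group is mandatory, whereas &d3par may be omitted, in which case
! default values are used (see the READ statements further below).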

    USE arrays_3d
    USE averaging
    USE cloud_parameters
    USE control_parameters
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE model_1d
    USE pegrid
    USE profil_parameter
    USE statistics

    IMPLICIT NONE

    INTEGER ::  i, idum


    NAMELIST /inipar/  adjust_mixing_length, alpha_surface, bc_e_b, bc_lr, &
                       bc_ns, bc_p_b, bc_p_t, bc_pt_b, bc_pt_t, bc_q_b, &
             bc_q_t, bc_s_b, bc_s_t, bc_sa_t, bc_uv_b, bc_uv_t, &
             bottom_salinityflux, building_height, building_length_x, &
             building_length_y, building_wall_left, building_wall_south, &
             call_psolver_at_all_substeps, canopy_mode, canyon_height, &
             canyon_width_x, canyon_width_y, canyon_wall_left, &
             canyon_wall_south, cfl_factor, cloud_droplets, cloud_physics, &
             collective_wait, conserve_volume_flow, conserve_volume_flow_mode, &
             coupling_start_time, cthf, curvature_solution_effects, &
             cut_spline_overshoot, &
             cycle_mg, damp_level_1d, dissipation_1d, & !dissipation_control, &
             dp_external, dp_level_b, dp_smooth, dpdxy, drag_coefficient, &
             dt, dt_pr_1d, dt_run_control_1d, dx, dy, dz, dz_max, &
             dz_stretch_factor, dz_stretch_level, e_init, e_min, end_time_1d, &
             fft_method, galilei_transformation, grid_matching, humidity, &
             inflow_damping_height, inflow_damping_width, &
             inflow_disturbance_begin, inflow_disturbance_end, &
             initializing_actions, km_constant, lad_surface, &
             lad_vertical_gradient, lad_vertical_gradient_level, &
             leaf_surface_concentration, long_filter_factor, &
             loop_optimization, masking_method, mg_cycles, &
             mg_switch_to_pe0_level, mixing_length_1d, momentum_advec, &
             netcdf_precision, neutral, ngsrb, nsor, &
             nsor_ini, nx, ny, nz, ocean, omega, omega_sor, &
             overshoot_limit_e, overshoot_limit_pt, &
             overshoot_limit_u, overshoot_limit_v, overshoot_limit_w, &
             passive_scalar, pch_index, phi, plant_canopy, prandtl_layer, &
             prandtl_number, precipitation, psolver, pt_damping_factor, &
             pt_damping_width, pt_reference, pt_surface, &
             pt_surface_initial_change, pt_vertical_gradient, &
             pt_vertical_gradient_level, q_surface, q_surface_initial_change, &
             q_vertical_gradient, q_vertical_gradient_level, radiation, &
             random_generator, random_heatflux, rayleigh_damping_factor, &
             rayleigh_damping_height, recycling_width, residual_limit, &
             rif_max, rif_min, roughness_length, sa_surface, &
             sa_vertical_gradient, sa_vertical_gradient_level, scalar_advec, &
             scalar_exchange_coefficient, scalar_rayleigh_damping, &
             statistic_regions, subs_vertical_gradient, &
             subs_vertical_gradient_level, surface_heatflux, surface_pressure, &
             surface_scalarflux, surface_waterflux, s_surface, &
             s_surface_initial_change, s_vertical_gradient, &
             s_vertical_gradient_level, timestep_scheme, &
             topography, topography_grid_convention, top_heatflux, &
             top_momentumflux_u, top_momentumflux_v, top_salinityflux, &
             turbulent_inflow, ug_surface, ug_vertical_gradient, &
             ug_vertical_gradient_level, ups_limit_e, ups_limit_pt, &
             ups_limit_u, ups_limit_v, ups_limit_w, use_surface_fluxes, &
             use_top_fluxes, use_ug_for_galilei_tr, use_upstream_for_tke, &
             uv_heights, u_bulk, u_profile, vg_surface, vg_vertical_gradient, &
             vg_vertical_gradient_level, v_bulk, v_profile, wall_adjustment, &
             wall_heatflux, wall_humidityflux, wall_scalarflux, z0h_factor


    NAMELIST /d3par/  averaging_interval, averaging_interval_pr, &
             create_disturbances, &
             cross_profiles, cross_ts_uymax, cross_ts_uymin, &
             data_output, data_output_format, data_output_masks, &
             data_output_pr, data_output_2d_on_each_pe, disturbance_amplitude, &
             disturbance_energy_limit, disturbance_level_b, &
             disturbance_level_t, do2d_at_begin, do3d_at_begin, do3d_compress, &
             do3d_comp_prec, dt, dt_averaging_input, dt_averaging_input_pr, &
             dt_coupling, dt_data_output, dt_data_output_av, dt_disturb, &
             dt_domask, dt_dopr, dt_dopr_listing, dt_dots, dt_do2d_xy, &
             dt_do2d_xz, dt_do2d_yz, dt_do3d, dt_max, dt_restart, &
             dt_run_control, end_time, force_print_header, mask_scale_x, &
             mask_scale_y, mask_scale_z, mask_x, mask_y, mask_z, mask_x_loop, &
             mask_y_loop, mask_z_loop, netcdf_data_format, normalizing_region, &
             npex, npey, nz_do3d, precipitation_amount_interval, &
             profile_columns, profile_rows, restart_time, section_xy, &
             section_xz, section_yz, skip_time_data_output, &
             skip_time_data_output_av, skip_time_dopr, skip_time_do2d_xy, &
             skip_time_do2d_xz, skip_time_do2d_yz, skip_time_do3d, &
             skip_time_domask, synchronous_exchange, termination_time_needed, &
             z_max_do2d


    NAMELIST /envpar/  host, local_dvrserver_running, maximum_cpu_time_allowed,&
                       maximum_parallel_io_streams, revision, return_addres, &
                       return_username, run_identifier, tasks_per_node, &
                       write_binary

!
!-- First read the values of the environment variables (this NAMELIST file is
!-- generated by mrun)
    OPEN ( 90, FILE='ENVPAR', STATUS='OLD', FORM='FORMATTED', ERR=30 )
    READ ( 90, envpar, ERR=31, END=32 )
    CLOSE ( 90 )
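!
!-- For orientation, an ENVPAR file generated by mrun might contain entries of
!-- the following form (the names are taken from the envpar namelist above,
!-- the values are purely illustrative):
!--    &envpar  run_identifier = 'example', host = 'lcmuk',
!--             tasks_per_node = 8, maximum_parallel_io_streams = 8,
!--             maximum_cpu_time_allowed = 3600.0  /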

!
!-- Calculate the number of groups into which parallel I/O is split.
!-- By default, for files which are opened by all PEs (or where each PE opens
!-- its own independent file), all PEs do input/output in parallel at the same
!-- time. Depending on the configuration of the underlying file system, this
!-- may cause performance problems or even more severe failures.
!-- First, set the default:
    IF ( maximum_parallel_io_streams == -1  .OR. &
         maximum_parallel_io_streams > numprocs )  THEN
       maximum_parallel_io_streams = numprocs
    ENDIF
!
!-- Now calculate the number of io_blocks and the io_group to which the
!-- respective PE belongs. The groups are processed one after the other, but
!-- all PEs belonging to the same group do their I/O in parallel.
!-- These settings are repeated in init_pegrid for the communicator comm2d,
!-- which is not available here.
    io_blocks = numprocs / maximum_parallel_io_streams
    io_group  = MOD( myid+1, io_blocks )
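!
!-- As a purely illustrative example (the numbers are assumptions, not
!-- recommendations): with numprocs = 64 and maximum_parallel_io_streams = 4,
!-- the two statements above give io_blocks = 64 / 4 = 16 and, e.g.,
!-- io_group = MOD( 0+1, 16 ) = 1 on PE 0 and io_group = MOD( 15+1, 16 ) = 0
!-- on PE 15, so that in the loop below only the 4 PEs sharing the same
!-- io_group value read at the same time, in 16 consecutive rounds.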

!
!-- Data is read in parallel by groups of PEs
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN

!
!--       Open the NAMELIST-file which is sent with this job
          CALL check_open( 11 )

!
!--       Read the control parameters for initialization.
!--       The namelist "inipar" must be provided in the NAMELIST-file.
          READ ( 11, inipar, ERR=10, END=11 )

#if defined ( __check )
!
!--       In case of a namelist file check, &inipar from the p3d file is
!--       used. The p3d file must be closed here and the p3df file must be
!--       opened for reading d3par.
          IF ( check_restart == 1 )  THEN
             CALL close_file( 11 )
             check_restart = 2
             CALL check_open( 11 )
             initializing_actions = 'read_restart_data'
          ENDIF
#endif
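!
!--       Normal path: skip the error branches at statement labels 10 and 11
!--       below and continue at label 12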
          GOTO 12

 10       message_string = 'errors in \$inipar &or no \$inipar-namelist ' // &
                           'found (CRAY-machines only)'
          CALL message( 'parin', 'PA0271', 1, 2, 0, 6, 0 )

 11       message_string = 'no \$inipar-namelist found'
          CALL message( 'parin', 'PA0272', 1, 2, 0, 6, 0 )

!
!--       If required, read control parameters from the restart file (produced
!--       by a prior run). All PEs read from the file created by PE0 (see
!--       check_open)
 12       IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
#if ! defined ( __check )
             CALL read_var_list
!
!--          The restart file will be reopened when reading the subdomain data
             CALL close_file( 13 )

!
!--          Increment the run count
             runnr = runnr + 1
#endif
          ENDIF

!
!--       Definition of the names of the areas used for computing statistics.
!--       They must be defined here, because the user is allowed to redefine
!--       them in user_parin.
          region = 'total domain'

!
!--       Read runtime parameters given by the user for this run (namelist
!--       "d3par"). The namelist "d3par" can be omitted. In that case, default
!--       values are used for the parameters.
          READ ( 11, d3par, END=20 )

!
!--       Read control parameters for optionally used model software packages
 20       CALL package_parin

!
!--       Read user-defined variables
          CALL user_parin

!
!--       In case of an initial run, check whether the grid point numbers are
!--       properly defined and allocate some arrays which are already needed
!--       in init_pegrid or check_parameters. During restart jobs, these
!--       arrays will be allocated in read_var_list. All other arrays are
!--       allocated in init_3d_model.
          IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN

             IF ( nx <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for nx: nx=', nx
                CALL message( 'parin', 'PA0273', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( ny <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for ny: ny=', ny
                CALL message( 'parin', 'PA0274', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( nz <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for nz: nz=', nz
                CALL message( 'parin', 'PA0275', 1, 2, 0, 6, 0 )
             ENDIF
!
!--          ATTENTION: in case of changes to the following statement please
!--                     also check the allocate statement in routine
!--                     read_var_list
             ALLOCATE( lad(0:nz+1), pt_init(0:nz+1), q_init(0:nz+1),          &
                       sa_init(0:nz+1), ug(0:nz+1), u_init(0:nz+1),           &
                       v_init(0:nz+1), vg(0:nz+1),                            &
                       hom(0:nz+1,2,pr_palm+max_pr_user,0:statistic_regions), &
                       hom_sum(0:nz+1,pr_palm+max_pr_user,0:statistic_regions) )

             hom = 0.0

          ENDIF

!
!--       NAMELIST-file is not needed anymore
          CALL close_file( 11 )

       ENDIF
#if defined( __parallel )  &&  ! defined ( __check )
       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
#endif
    ENDDO

    RETURN

 30 message_string = 'local file ENVPAR not found' // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0276', 0, 1, 0, 6, 0 )
    RETURN

 31 message_string = 'errors in local file ENVPAR' // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0277', 0, 1, 0, 6, 0 )
    RETURN

 32 message_string = 'no envpar-NAMELIST found in local file ENVPAR'  // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0278', 0, 1, 0, 6, 0 )

 END SUBROUTINE parin