source: palm/trunk/SOURCE/parin.f90 @ 856

Last change on this file since 856 was 826, checked in by raasch, 13 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 16.1 KB
RevLine 
[1]1 SUBROUTINE parin
2
3!------------------------------------------------------------------------------!
[257]4! Current revisions:
[1]5! -----------------
[826]6!
[601]7!
8! Former revisions:
9! -----------------
10! $Id: parin.f90 826 2012-02-19 03:41:34Z suehring $
11!
[826]12! 824 2012-02-17 09:09:57Z raasch
13! +curvature_solution_effects in inipar
14!
[810]15! 809 2012-01-30 13:32:58Z maronga
16! Bugfix: replaced .AND. and .NOT. with && and ! in the preprocessor directives
17!
[808]18! 807 2012-01-25 11:53:51Z maronga
19! New cpp directive "__check" implemented which is used by check_namelist_files
20!
[786]21! 785 2011-11-28 09:47:19Z raasch
22! +scalar_rayleigh_damping in inipar
23!
[768]24! 767 2011-10-14 06:39:12Z raasch
25! +u_profile, v_profile, uv_heights in inipar
26!
[760]27! 759 2011-09-15 13:58:31Z raasch
28! +maximum_parallel_io_streams in envpar,
29! splitting of parallel I/O in blocks of PEs
30!
[684]31! 683 2011-02-09 14:25:15Z raasch
32! +synchronous_exchange in d3par
33!
[668]34! 667 2010-12-23 12:06:00Z suehring/gryschka
35! Steering parameter dissipation_control added in inipar. (commented out)
36!
[623]37! 622 2010-12-10 08:08:13Z raasch
38! +collective_wait in inipar
39!
[601]40! 600 2010-11-24 16:10:51Z raasch
[600]41! parameters moved from d3par to inipar: call_psolver_at_all_substeps,
42! cfl_factor, cycle_mg, mg_cycles, mg_switch_to_pe0_level, ngsrb, nsor,
43! omega_sor, prandtl_number, psolver, rayleigh_damping_factor,
44! rayleigh_damping_height, residual_limit
[392]45!
[581]46! 580 2010-10-05 13:59:11Z heinze
47! Renaming of ws_vertical_gradient to subs_vertical_gradient and
48! ws_vertical_gradient_level to subs_vertical_gradient_level
49!
[554]50! 553 2010-09-01 14:09:06Z weinreis
51! parameters for masked output are replaced by arrays
52!
[494]53! 493 2010-03-01 08:30:24Z raasch
54! +netcdf_data_format in d3par, -netcdf_64bit, -netcdf_64bit_3d
55!
[482]56! 449 2010-02-02 11:23:59Z raasch
57! +wall_humidityflux, wall_scalarflux
58! +ws_vertical_gradient, ws_vertical_gradient_level
59!
[449]60! 410 2009-12-04 17:05:40Z letzel
61! masked data output: + dt_domask, mask_01~20_x|y|z, mask_01~20_x|y|z_loop,
[493]62! mask_scale_x|y|z, masks, skip_time_domask
[449]63!
[392]64! 291 2009-04-16 12:07:26Z raasch
[260]65! +local_dvrserver_running in envpar
[257]66! Output of messages replaced by message handling routine.
[240]67! +canyon_height, canyon_width_x, canyon_width_y, canyon_wall_left,
[291]68! canyon_wall_south, conserve_volume_flow_mode, coupling_start_time,
69! dp_external, dp_level_b, dp_smooth, dpdxy, u_bulk, v_bulk in inipar
[256]70! topography_grid_convention moved from userpar
[1]71!
[198]72! 197 2008-09-16 15:29:03Z raasch
73! +cthf,leaf_surface_concentration, scalar_exchange_coefficient
74! +inflow_damping_height, inflow_damping_width, recycling_width,
75! turbulent_inflow in inipar, -skip_time_dosp in d3par,
76! allocation of hom_sum moved from init_3d_model to here,
77! npex, npey moved from inipar to d3par, setting of myid_char_14 removed,
78! lad is allways allocated
79!
[139]80! 138 2007-11-28 10:03:58Z letzel
81! +canopy_mode, drag_coefficient, lad_surface, lad_vertical_gradient,
82! lad_vertical_gradient_level, pch_index, plant_canopy,
83! +allocation of leaf area density field
84!
[110]85! 108 2007-08-24 15:10:38Z letzel
86! +e_init, top_momentumflux_u|v in inipar, +dt_coupling in d3par
87!
[98]88! 95 2007-06-02 16:48:38Z raasch
89! +bc_sa_t, bottom_salinityflux, ocean, sa_surface, sa_vertical_gradient,
90! sa_vertical_gradient_level, top_salinityflux in inipar,
91! sa_init is allocated
92!
[90]93! 87 2007-05-22 15:46:47Z raasch
94! Size of hom increased by the maximum number of user-defined profiles,
95! var_hom renamed pr_palm
96!
[83]97! 82 2007-04-16 15:40:52Z raasch
98! +return_addres, return_username in envpar
99!
[77]100! 75 2007-03-22 09:54:05Z raasch
101! +dt_max, netcdf_64bit_3d, precipitation_amount_interval in d3par,
102! +loop_optimization, pt_reference in inipar, -data_output_ts,
103! moisture renamed humidity
104!
[39]105! 20 2007-02-26 00:12:32Z raasch
106! +top_heatflux, use_top_fluxes in inipar
107!
[7]108! 3 2007-02-13 11:30:58Z raasch
[3]109! +netcdf_64bit_3d in d3par,
110! RCS Log replace by Id keyword, revision history cleaned up
[2]111!
[1]112! Revision 1.57  2007/02/11 13:11:22  raasch
113! Values of environment variables are now read from file ENVPAR instead of
114! reading them with a system call, + NAMELIST envpar
115!
116! Revision 1.1  1997/07/24 11:22:50  raasch
117! Initial revision
118!
119!
120! Description:
121! ------------
122! This subroutine reads variables controling the run from the NAMELIST files
!
! It reads, in order: the "envpar" namelist from local file ENVPAR (unit 90,
! written by mrun), then - in groups of PEs to limit parallel I/O load - the
! "inipar" (mandatory) and "d3par" (optional) namelists from the job's
! NAMELIST file (unit 11), followed by package and user namelists.
123!------------------------------------------------------------------------------!
124
125    USE arrays_3d
126    USE averaging
[824]127    USE cloud_parameters
[1]128    USE control_parameters
[260]129    USE dvrp_variables
[1]130    USE grid_variables
131    USE indices
132    USE model_1d
133    USE pegrid
134    USE profil_parameter
135    USE statistics
136
137    IMPLICIT NONE
138
[759]139    INTEGER ::  i, idum
[1]140
141
142    NAMELIST /inipar/  adjust_mixing_length, alpha_surface, bc_e_b, bc_lr, &
143                       bc_ns, bc_p_b, bc_p_t, bc_pt_b, bc_pt_t, bc_q_b, &
[95]144             bc_q_t,bc_s_b, bc_s_t, bc_sa_t, bc_uv_b, bc_uv_t, &
[138]145             bottom_salinityflux, building_height, building_length_x, &
[240]146             building_length_y, building_wall_left, building_wall_south, &
[600]147             call_psolver_at_all_substeps, canopy_mode, canyon_height, &
148             canyon_width_x, canyon_width_y, canyon_wall_left, &
149             canyon_wall_south, cfl_factor, cloud_droplets, cloud_physics, &
[622]150             collective_wait, conserve_volume_flow, conserve_volume_flow_mode, &
[824]151             coupling_start_time, cthf, curvature_solution_effects, &
152             cut_spline_overshoot, &
[667]153             cycle_mg, damp_level_1d, dissipation_1d, & !dissipation_control, &
154             dp_external, dp_level_b, dp_smooth, dpdxy, drag_coefficient, &
155             dt, dt_pr_1d, dt_run_control_1d, dx, dy, dz, dz_max, & 
156             dz_stretch_factor, dz_stretch_level, e_init, e_min, end_time_1d, &
157             fft_method, galilei_transformation, grid_matching, humidity, &
[151]158             inflow_damping_height, inflow_damping_width, &
[94]159             inflow_disturbance_begin, inflow_disturbance_end, &
[138]160             initializing_actions, km_constant, km_damp_max, lad_surface, &
161             lad_vertical_gradient, lad_vertical_gradient_level, &
[153]162             leaf_surface_concentration, long_filter_factor, &
[600]163             loop_optimization, mg_cycles, mg_switch_to_pe0_level, &
164             mixing_length_1d, momentum_advec, netcdf_precision, ngsrb, nsor, &
165             nsor_ini, nx, ny, nz, ocean, omega, omega_sor, &
166             outflow_damping_width, overshoot_limit_e, overshoot_limit_pt, &
167             overshoot_limit_u, overshoot_limit_v, overshoot_limit_w, &
168             passive_scalar, pch_index, phi, plant_canopy, prandtl_layer, &
169             prandtl_number, precipitation, psolver, pt_reference, pt_surface, &
170             pt_surface_initial_change, pt_vertical_gradient, &
[94]171             pt_vertical_gradient_level, q_surface, q_surface_initial_change, &
172             q_vertical_gradient, q_vertical_gradient_level, radiation, &
[600]173             random_generator, random_heatflux, rayleigh_damping_factor, &
174             rayleigh_damping_height, recycling_width, residual_limit, &
175             rif_max, rif_min, roughness_length, sa_surface, &
176             sa_vertical_gradient, sa_vertical_gradient_level, scalar_advec, &
[785]177             scalar_exchange_coefficient, scalar_rayleigh_damping, &
178             statistic_regions, subs_vertical_gradient, &
179             subs_vertical_gradient_level, surface_heatflux, surface_pressure, &
180             surface_scalarflux, surface_waterflux, s_surface, &
181             s_surface_initial_change, s_vertical_gradient, &
182             s_vertical_gradient_level, timestep_scheme, &
[256]183             topography, topography_grid_convention, top_heatflux, &
[600]184             top_momentumflux_u, top_momentumflux_v, top_salinityflux, &
185             turbulent_inflow, ug_surface, ug_vertical_gradient, &
[767]186             ug_vertical_gradient_level, ups_limit_e, ups_limit_pt, &
[600]187             ups_limit_u, ups_limit_v, ups_limit_w, use_surface_fluxes, &
188             use_top_fluxes, use_ug_for_galilei_tr, use_upstream_for_tke, &
[767]189             uv_heights, u_bulk, u_profile, vg_surface, vg_vertical_gradient, &
190             vg_vertical_gradient_level, v_bulk, v_profile, wall_adjustment, &
191             wall_heatflux, wall_humidityflux, wall_scalarflux
[1]192

193
[600]194    NAMELIST /d3par/  averaging_interval, averaging_interval_pr, &
195             create_disturbances, cross_normalized_x, cross_normalized_y, &
196             cross_profiles, cross_ts_uymax, cross_ts_uymin, cross_xtext, &
197             data_output, data_output_format, data_output_masks, &
198             data_output_pr, data_output_2d_on_each_pe, disturbance_amplitude, &
199             disturbance_energy_limit, disturbance_level_b, &
200             disturbance_level_t, do2d_at_begin, do3d_at_begin, do3d_compress, &
201             do3d_comp_prec, dt, dt_averaging_input, dt_averaging_input_pr, &
202             dt_coupling, dt_data_output, dt_data_output_av, dt_disturb, &
203             dt_domask, dt_dopr, dt_dopr_listing, dt_dots, dt_do2d_xy, &
204             dt_do2d_xz, dt_do2d_yz, dt_do3d, dt_max, dt_restart, &
205             dt_run_control,end_time, force_print_header, mask_scale_x, &
206             mask_scale_y, mask_scale_z, mask_x, mask_y, mask_z, mask_x_loop, &
[759]207             mask_y_loop, mask_z_loop, netcdf_data_format, normalizing_region, &
208             npex, npey, nz_do3d, precipitation_amount_interval, &
209             profile_columns, profile_rows, restart_time, section_xy, &
210             section_xz, section_yz, skip_time_data_output, &
211             skip_time_data_output_av, skip_time_dopr, skip_time_do2d_xy, &
212             skip_time_do2d_xz, skip_time_do2d_yz, skip_time_do3d, &
213             skip_time_domask, synchronous_exchange, termination_time_needed, &
214             use_prior_plot1d_parameters, z_max_do1d, z_max_do1d_normalized, &
215             z_max_do2d
[1]216

217
[759]218    NAMELIST /envpar/  host, local_dvrserver_running, maximum_cpu_time_allowed,&
219                       maximum_parallel_io_streams, revision, return_addres, &
220                       return_username, run_identifier, tasks_per_node, &
221                       write_binary
[1]222

223!
[759]224!-- First read values of environment variables (this NAMELIST file is
225!-- generated by mrun)
!-- Labels 30/31/32 at the end of this routine handle a missing file, a read
!-- error, and a missing &envpar namelist, respectively (all non-fatal).
226    OPEN ( 90, FILE='ENVPAR', STATUS='OLD', FORM='FORMATTED', ERR=30 )
227    READ ( 90, envpar, ERR=31, END=32 )
228    CLOSE ( 90 )
[1]229

230!
[759]231!-- Calculate the number of groups into which parallel I/O is split.
232!-- The default for files which are opened by all PEs (or where each
233!-- PE opens his own independent file) is, that all PEs are doing input/output
234!-- in parallel at the same time. This might cause performance or even more
235!-- severe problems depending on the configuration of the underlying file
236!-- system.
237!-- First, set the default:
238    IF ( maximum_parallel_io_streams == -1  .OR. &
239         maximum_parallel_io_streams > numprocs )  THEN
240       maximum_parallel_io_streams = numprocs
241    ENDIF
242!
243!-- Now calculate the number of io_blocks and the io_group to which the
244!-- respective PE belongs. I/O of the groups is done in serial, but in parallel
245!-- for all PEs belonging to the same group.
246!-- These settings are repeated in init_pegrid for the communicator comm2d,
247!-- which is not available here
248    io_blocks = numprocs / maximum_parallel_io_streams
!-- NOTE(review): the "+1" offsets the group assignment so that PE0 does not
!-- belong to the first-served group 0 (unless io_blocks == 1) — confirm intent
249    io_group  = MOD( myid+1, io_blocks )
[1]250

[759]251!
252!-- Data is read in parallel by groups of PEs
253    DO  i = 0, io_blocks-1
254       IF ( i == io_group )  THEN
[559]255

[1]256!
[759]257!--       Open the NAMELIST-file which is send with this job
258          CALL check_open( 11 )
[559]259

[1]260!
[759]261!--       Read the control parameters for initialization.
262!--       The namelist "inipar" must be provided in the NAMELIST-file. If this
263!--       is not the case and the file contains - instead of "inipar" - any
264!--       other namelist, a read error is created on t3e and control is
265!--       transferred to the statement with label 10. Therefore, on t3e
266!--       machines one can not distinguish between errors produced by a wrong
267!--       "inipar" namelist or because this namelist is totally missing.
[807]268           READ ( 11, inipar, ERR=10, END=11 )
269
270#if defined ( __check )
271!
272!--       In case of a namelist file check, &inipar from the p3d file is
273!--       used. The p3d file here must be closed and the p3df file for reading
274!--       3dpar is opened.
275          IF ( check_restart == 1 ) THEN
276             CALL close_file( 11 )
277             check_restart = 2
278             CALL check_open( 11 )             
279             initializing_actions = 'read_restart_data'
280          END IF
#endif
!
!--       Successful read of "inipar": jump over the fatal-error labels 10/11
281
282           GOTO 12
283
[759]284 10       message_string = 'errors in \$inipar &or no \$inipar-namelist ' // &
285                           'found (CRAY-machines only)'
286          CALL message( 'parin', 'PA0271', 1, 2, 0, 6, 0 )
[146]287

[759]288 11       message_string = 'no \$inipar-namelist found'
289          CALL message( 'parin', 'PA0272', 1, 2, 0, 6, 0 )
290
[146]291!
[759]292!--       If required, read control parameters from restart file (produced by
293!--       a prior run). All PEs are reading from file created by PE0 (see
294!--       check_open)
[807]295
296
[759]297 12       IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
[809]298#if ! defined ( __check )
[759]299             CALL read_var_list
300!
301!--          The restart file will be reopened when reading the subdomain data
302             CALL close_file( 13 )
[87]303

[1]304!
[759]305!--          Increment the run count
306             runnr = runnr + 1
[807]307#endif
[759]308          ENDIF
309
[87]310!
[759]311!--       Definition of names of areas used for computing statistics. They must
312!--       be defined at this place, because they are allowed to be redefined by
313!--       the user in user_parin.
314          region = 'total domain'
[87]315

316!
[759]317!--       Read runtime parameters given by the user for this run (namelist
318!--       "d3par"). The namelist "d3par" can be omitted. In that case, default
319!--       values are used for the parameters.
320          READ ( 11, d3par, END=20 )
[87]321

322!
[759]323!--       Read control parameters for optionally used model software packages
324 20       CALL package_parin
[87]325

326!
[759]327!--       Read user-defined variables
328          CALL user_parin
[87]329

[147]330!
[759]331!--       Check in case of initial run, if the grid point numbers are well
332!--       defined and allocate some arrays which are already needed in
333!--       init_pegrid or check_parameters. During restart jobs, these arrays
334!--       will be allocated in read_var_list. All other arrays are allocated
335!--       in init_3d_model.
336          IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN
[667]337

[759]338             IF ( nx <= 0 )  THEN
339                WRITE( message_string, * ) 'no value or wrong value given', &
340                                           ' for nx: nx=', nx
341                CALL message( 'parin', 'PA0273', 1, 2, 0, 6, 0 )
342             ENDIF
343             IF ( ny <= 0 )  THEN
344                WRITE( message_string, * ) 'no value or wrong value given', &
345                                           ' for ny: ny=', ny
346                CALL message( 'parin', 'PA0274', 1, 2, 0, 6, 0 )
347             ENDIF
348             IF ( nz <= 0 )  THEN
349                WRITE( message_string, * ) 'no value or wrong value given', &
350                                           ' for nz: nz=', nz
351                CALL message( 'parin', 'PA0275', 1, 2, 0, 6, 0 )
352             ENDIF
353!
354!--          ATTENTION: in case of changes to the following statement please
355!--                  also check the allocate statement in routine read_var_list
356             ALLOCATE( lad(0:nz+1),pt_init(0:nz+1), q_init(0:nz+1),           &
357                       sa_init(0:nz+1), ug(0:nz+1), u_init(0:nz+1),           &
358                       v_init(0:nz+1), vg(0:nz+1),                            &
359                       hom(0:nz+1,2,pr_palm+max_pr_user,0:statistic_regions), &
360                       hom_sum(0:nz+1,pr_palm+max_pr_user,0:statistic_regions) )
[1]361

[759]362             hom = 0.0
[1]363

[759]364          ENDIF
365
[1]366!
[759]367!--       NAMELIST-file is not needed anymore
368          CALL close_file( 11 )
[1]369

[759]370       ENDIF
!
!--    NOTE(review): "! ( __check )" below works only because cpp evaluates an
!--    undefined macro as 0; the explicit form "! defined( __check )" (as used
!--    further above) states the intent unambiguously — confirm before changing
[809]371#if defined( __parallel ) && ! ( __check )
[759]372       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
373#endif
374    ENDDO
375
[1]376    RETURN
377
!
!-- Non-fatal error exits for the ENVPAR file read at the top of this routine
!-- (flags 0/1 in the message calls: informative/warning, program continues)
[257]378 30 message_string = 'local file ENVPAR not found' // &
379                     '&some variables for steering may not be properly set'
380    CALL message( 'parin', 'PA0276', 0, 1, 0, 6, 0 )
[1]381    RETURN
382
[257]383 31 message_string = 'errors in local file ENVPAR' // &
384                     '&some variables for steering may not be properly set'
385    CALL message( 'parin', 'PA0277', 0, 1, 0, 6, 0 )
[1]386    RETURN
387
[257]388 32 message_string = 'no envpar-NAMELIST found in local file ENVPAR'  // &
389                     '&some variables for steering may not be properly set'
390    CALL message( 'parin', 'PA0278', 0, 1, 0, 6, 0 )
[1]391

392 END SUBROUTINE parin
Note: See TracBrowser for help on using the repository browser.