 SUBROUTINE parin

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: parin.f90 826 2012-02-19 03:41:34Z heinze $
!
! 824 2012-02-17 09:09:57Z raasch
! +curvature_solution_effects in inipar
!
! 809 2012-01-30 13:32:58Z maronga
! Bugfix: replaced .AND. and .NOT. with && and ! in the preprocessor directives
!
! 807 2012-01-25 11:53:51Z maronga
! New cpp directive "__check" implemented which is used by check_namelist_files
!
! 785 2011-11-28 09:47:19Z raasch
! +scalar_rayleigh_damping in inipar
!
! 767 2011-10-14 06:39:12Z raasch
! +u_profile, v_profile, uv_heights in inipar
!
! 759 2011-09-15 13:58:31Z raasch
! +maximum_parallel_io_streams in envpar,
! splitting of parallel I/O in blocks of PEs
!
! 683 2011-02-09 14:25:15Z raasch
! +synchronous_exchange in d3par
!
! 667 2010-12-23 12:06:00Z suehring/gryschka
! Steering parameter dissipation_control added in inipar. (commented out)
!
! 622 2010-12-10 08:08:13Z raasch
! +collective_wait in inipar
!
! 600 2010-11-24 16:10:51Z raasch
! parameters moved from d3par to inipar: call_psolver_at_all_substeps,
! cfl_factor, cycle_mg, mg_cycles, mg_switch_to_pe0_level, ngsrb, nsor,
! omega_sor, prandtl_number, psolver, rayleigh_damping_factor,
! rayleigh_damping_height, residual_limit
!
! 580 2010-10-05 13:59:11Z heinze
! Renaming of ws_vertical_gradient to subs_vertical_gradient and
! ws_vertical_gradient_level to subs_vertical_gradient_level
!
! 553 2010-09-01 14:09:06Z weinreis
! parameters for masked output are replaced by arrays
!
! 493 2010-03-01 08:30:24Z raasch
! +netcdf_data_format in d3par, -netcdf_64bit, -netcdf_64bit_3d
!
! 449 2010-02-02 11:23:59Z raasch
! +wall_humidityflux, wall_scalarflux
! +ws_vertical_gradient, ws_vertical_gradient_level
!
! 410 2009-12-04 17:05:40Z letzel
! masked data output: + dt_domask, mask_01~20_x|y|z, mask_01~20_x|y|z_loop,
! mask_scale_x|y|z, masks, skip_time_domask
!
! 291 2009-04-16 12:07:26Z raasch
! +local_dvrserver_running in envpar
! Output of messages replaced by message handling routine.
! +canyon_height, canyon_width_x, canyon_width_y, canyon_wall_left,
! canyon_wall_south, conserve_volume_flow_mode, coupling_start_time,
! dp_external, dp_level_b, dp_smooth, dpdxy, u_bulk, v_bulk in inipar
! topography_grid_convention moved from userpar
!
! 197 2008-09-16 15:29:03Z raasch
! +cthf,leaf_surface_concentration, scalar_exchange_coefficient
! +inflow_damping_height, inflow_damping_width, recycling_width,
! turbulent_inflow in inipar, -skip_time_dosp in d3par,
! allocation of hom_sum moved from init_3d_model to here,
! npex, npey moved from inipar to d3par, setting of myid_char_14 removed,
! lad is allways allocated
!
! 138 2007-11-28 10:03:58Z letzel
! +canopy_mode, drag_coefficient, lad_surface, lad_vertical_gradient,
! lad_vertical_gradient_level, pch_index, plant_canopy,
! +allocation of leaf area density field
!
! 108 2007-08-24 15:10:38Z letzel
! +e_init, top_momentumflux_u|v in inipar, +dt_coupling in d3par
!
! 95 2007-06-02 16:48:38Z raasch
! +bc_sa_t, bottom_salinityflux, ocean, sa_surface, sa_vertical_gradient,
! sa_vertical_gradient_level, top_salinityflux in inipar,
! sa_init is allocated
!
! 87 2007-05-22 15:46:47Z raasch
! Size of hom increased by the maximum number of user-defined profiles,
! var_hom renamed pr_palm
!
! 82 2007-04-16 15:40:52Z raasch
! +return_addres, return_username in envpar
!
! 75 2007-03-22 09:54:05Z raasch
! +dt_max, netcdf_64bit_3d, precipitation_amount_interval in d3par,
! +loop_optimization, pt_reference in inipar, -data_output_ts,
! moisture renamed humidity
!
! 20 2007-02-26 00:12:32Z raasch
! +top_heatflux, use_top_fluxes in inipar
!
! 3 2007-02-13 11:30:58Z raasch
! +netcdf_64bit_3d in d3par,
! RCS Log replace by Id keyword, revision history cleaned up
!
! Revision 1.57 2007/02/11 13:11:22 raasch
! Values of environment variables are now read from file ENVPAR instead of
! reading them with a system call, + NAMELIST envpar
!
! Revision 1.1 1997/07/24 11:22:50 raasch
! Initial revision
!
!
! Description:
! ------------
! This subroutine reads variables controlling the run from the NAMELIST files:
! - &envpar from local file ENVPAR (unit 90, written by mrun),
! - &inipar (mandatory) and &d3par (optional) from the job's NAMELIST file
!   (unit 11, opened via check_open).
! It also splits parallel NAMELIST input into groups of PEs
! (maximum_parallel_io_streams) and, for initial runs, allocates the basic
! profile/statistics arrays that init_pegrid / check_parameters already need.
!------------------------------------------------------------------------------!

    USE arrays_3d
    USE averaging
    USE cloud_parameters
    USE control_parameters
    USE dvrp_variables
    USE grid_variables
    USE indices
    USE model_1d
    USE pegrid
    USE profil_parameter
    USE statistics

    IMPLICIT NONE

!-- i: loop index over I/O groups.
!-- NOTE(review): idum appears unused in this routine — candidate for removal.
    INTEGER ::  i, idum

!
!-- Steering parameters for the model initialization (read from the job's
!-- NAMELIST file). All variables live in the USEd modules above.
    NAMELIST /inipar/  adjust_mixing_length, alpha_surface, bc_e_b, bc_lr, &
                       bc_ns, bc_p_b, bc_p_t, bc_pt_b, bc_pt_t, bc_q_b, &
                       bc_q_t,bc_s_b, bc_s_t, bc_sa_t, bc_uv_b, bc_uv_t, &
                       bottom_salinityflux, building_height, building_length_x, &
                       building_length_y, building_wall_left, building_wall_south, &
                       call_psolver_at_all_substeps, canopy_mode, canyon_height, &
                       canyon_width_x, canyon_width_y, canyon_wall_left, &
                       canyon_wall_south, cfl_factor, cloud_droplets, cloud_physics, &
                       collective_wait, conserve_volume_flow, conserve_volume_flow_mode, &
                       coupling_start_time, cthf, curvature_solution_effects, &
                       cut_spline_overshoot, &
                       cycle_mg, damp_level_1d, dissipation_1d, &
!                      dissipation_control, &
                       dp_external, dp_level_b, dp_smooth, dpdxy, drag_coefficient, &
                       dt, dt_pr_1d, dt_run_control_1d, dx, dy, dz, dz_max, &
                       dz_stretch_factor, dz_stretch_level, e_init, e_min, end_time_1d, &
                       fft_method, galilei_transformation, grid_matching, humidity, &
                       inflow_damping_height, inflow_damping_width, &
                       inflow_disturbance_begin, inflow_disturbance_end, &
                       initializing_actions, km_constant, km_damp_max, lad_surface, &
                       lad_vertical_gradient, lad_vertical_gradient_level, &
                       leaf_surface_concentration, long_filter_factor, &
                       loop_optimization, mg_cycles, mg_switch_to_pe0_level, &
                       mixing_length_1d, momentum_advec, netcdf_precision, ngsrb, nsor, &
                       nsor_ini, nx, ny, nz, ocean, omega, omega_sor, &
                       outflow_damping_width, overshoot_limit_e, overshoot_limit_pt, &
                       overshoot_limit_u, overshoot_limit_v, overshoot_limit_w, &
                       passive_scalar, pch_index, phi, plant_canopy, prandtl_layer, &
                       prandtl_number, precipitation, psolver, pt_reference, pt_surface, &
                       pt_surface_initial_change, pt_vertical_gradient, &
                       pt_vertical_gradient_level, q_surface, q_surface_initial_change, &
                       q_vertical_gradient, q_vertical_gradient_level, radiation, &
                       random_generator, random_heatflux, rayleigh_damping_factor, &
                       rayleigh_damping_height, recycling_width, residual_limit, &
                       rif_max, rif_min, roughness_length, sa_surface, &
                       sa_vertical_gradient, sa_vertical_gradient_level, scalar_advec, &
                       scalar_exchange_coefficient, scalar_rayleigh_damping, &
                       statistic_regions, subs_vertical_gradient, &
                       subs_vertical_gradient_level, surface_heatflux, surface_pressure, &
                       surface_scalarflux, surface_waterflux, s_surface, &
                       s_surface_initial_change, s_vertical_gradient, &
                       s_vertical_gradient_level, timestep_scheme, &
                       topography, topography_grid_convention, top_heatflux, &
                       top_momentumflux_u, top_momentumflux_v, top_salinityflux, &
                       turbulent_inflow, ug_surface, ug_vertical_gradient, &
                       ug_vertical_gradient_level, ups_limit_e, ups_limit_pt, &
                       ups_limit_u, ups_limit_v, ups_limit_w, use_surface_fluxes, &
                       use_top_fluxes, use_ug_for_galilei_tr, use_upstream_for_tke, &
                       uv_heights, u_bulk, u_profile, vg_surface, vg_vertical_gradient, &
                       vg_vertical_gradient_level, v_bulk, v_profile, wall_adjustment, &
                       wall_heatflux, wall_humidityflux, wall_scalarflux

!
!-- Runtime (output/steering) parameters; this namelist may be omitted from
!-- the NAMELIST file, in which case defaults apply.
    NAMELIST /d3par/   averaging_interval, averaging_interval_pr, &
                       create_disturbances, cross_normalized_x, cross_normalized_y, &
                       cross_profiles, cross_ts_uymax, cross_ts_uymin, cross_xtext, &
                       data_output, data_output_format, data_output_masks, &
                       data_output_pr, data_output_2d_on_each_pe, disturbance_amplitude, &
                       disturbance_energy_limit, disturbance_level_b, &
                       disturbance_level_t, do2d_at_begin, do3d_at_begin, do3d_compress, &
                       do3d_comp_prec, dt, dt_averaging_input, dt_averaging_input_pr, &
                       dt_coupling, dt_data_output, dt_data_output_av, dt_disturb, &
                       dt_domask, dt_dopr, dt_dopr_listing, dt_dots, dt_do2d_xy, &
                       dt_do2d_xz, dt_do2d_yz, dt_do3d, dt_max, dt_restart, &
                       dt_run_control,end_time, force_print_header, mask_scale_x, &
                       mask_scale_y, mask_scale_z, mask_x, mask_y, mask_z, mask_x_loop, &
                       mask_y_loop, mask_z_loop, netcdf_data_format, normalizing_region, &
                       npex, npey, nz_do3d, precipitation_amount_interval, &
                       profile_columns, profile_rows, restart_time, section_xy, &
                       section_xz, section_yz, skip_time_data_output, &
                       skip_time_data_output_av, skip_time_dopr, skip_time_do2d_xy, &
                       skip_time_do2d_xz, skip_time_do2d_yz, skip_time_do3d, &
                       skip_time_domask, synchronous_exchange, termination_time_needed, &
                       use_prior_plot1d_parameters, z_max_do1d, z_max_do1d_normalized, &
                       z_max_do2d

!
!-- Values of environment variables, provided by mrun in local file ENVPAR.
    NAMELIST /envpar/  host, local_dvrserver_running, maximum_cpu_time_allowed,&
                       maximum_parallel_io_streams, revision, return_addres, &
                       return_username, run_identifier, tasks_per_node, &
                       write_binary

!
!-- First read values of environment variables (this NAMELIST file is
!-- generated by mrun)
!-- NOTE(review): hard-coded unit 90 and ERR=/END= label branches are legacy
!-- style; labels 30/31/32 below issue warnings (message level 0) and return.
    OPEN ( 90, FILE='ENVPAR', STATUS='OLD', FORM='FORMATTED', ERR=30 )
    READ ( 90, envpar, ERR=31, END=32 )
    CLOSE ( 90 )

!
!-- Calculate the number of groups into which parallel I/O is split.
!-- The default for files which are opened by all PEs (or where each
!-- PE opens his own independent file) is, that all PEs are doing input/output
!-- in parallel at the same time. This might cause performance or even more
!-- severe problems depending on the configuration of the underlying file
!-- system.
!-- First, set the default (-1 means "not set by the user"):
    IF ( maximum_parallel_io_streams == -1  .OR. &
         maximum_parallel_io_streams > numprocs )  THEN
       maximum_parallel_io_streams = numprocs
    ENDIF

!
!-- Now calculate the number of io_blocks and the io_group to which the
!-- respective PE belongs. I/O of the groups is done in serial, but in parallel
!-- for all PEs belonging to the same group.
!-- These settings are repeated in init_pegrid for the communicator comm2d,
!-- which is not available here
    io_blocks = numprocs / maximum_parallel_io_streams
    io_group  = MOD( myid+1, io_blocks )

!
!-- Data is read in parallel by groups of PEs; only the PEs whose group index
!-- matches the current iteration do their NAMELIST input in this pass.
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN
!
!--       Open the NAMELIST-file which is sent with this job
          CALL check_open( 11 )

!
!--       Read the control parameters for initialization.
!--       The namelist "inipar" must be provided in the NAMELIST-file. If this
!--       is not the case and the file contains - instead of "inipar" - any
!--       other namelist, a read error is created on t3e and control is
!--       transferred to the statement with label 10. Therefore, on t3e
!--       machines one can not distinguish between errors produced by a wrong
!--       "inipar" namelist or because this namelist is totally missing.
          READ ( 11, inipar, ERR=10, END=11 )

#if defined ( __check )
!
!--       In case of a namelist file check, &inipar from the p3d file is
!--       used. The p3d file here must be closed and the p3df file for reading
!--       3dpar is opened.
          IF ( check_restart == 1 )  THEN

             CALL close_file( 11 )
             check_restart = 2
             CALL check_open( 11 )
             initializing_actions = 'read_restart_data'

          END IF
#endif
!
!--       Successful read: skip the two error branches below.
          GOTO 12

!
!--       Error branches of the inipar READ; message level 1 aborts the run.
!--       The "\$" and "&" in the strings are markup for the message routine.
 10       message_string = 'errors in \$inipar &or no \$inipar-namelist ' // &
                           'found (CRAY-machines only)'
          CALL message( 'parin', 'PA0271', 1, 2, 0, 6, 0 )

 11       message_string = 'no \$inipar-namelist found'
          CALL message( 'parin', 'PA0272', 1, 2, 0, 6, 0 )

!
!--       If required, read control parameters from restart file (produced by
!--       a prior run). All PEs are reading from file created by PE0 (see
!--       check_open)
 12       IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
#if ! defined ( __check )
             CALL read_var_list
!
!--          The restart file will be reopened when reading the subdomain data
             CALL close_file( 13 )

!
!--          Increment the run count
             runnr = runnr + 1
#endif
          ENDIF

!
!--       Definition of names of areas used for computing statistics. They must
!--       be defined at this place, because they are allowed to be redefined by
!--       the user in user_parin.
          region = 'total domain'

!
!--       Read runtime parameters given by the user for this run (namelist
!--       "d3par"). The namelist "d3par" can be omitted. In that case, default
!--       values are used for the parameters (END=20 simply skips ahead).
          READ ( 11, d3par, END=20 )

!
!--       Read control parameters for optionally used model software packages
 20       CALL package_parin

!
!--       Read user-defined variables
          CALL user_parin

!
!--       Check in case of initial run, if the grid point numbers are well
!--       defined and allocate some arrays which are already needed in
!--       init_pegrid or check_parameters. During restart jobs, these arrays
!--       will be allocated in read_var_list. All other arrays are allocated
!--       in init_3d_model.
          IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN

             IF ( nx <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for nx: nx=', nx
                CALL message( 'parin', 'PA0273', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( ny <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for ny: ny=', ny
                CALL message( 'parin', 'PA0274', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( nz <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given', &
                                           ' for nz: nz=', nz
                CALL message( 'parin', 'PA0275', 1, 2, 0, 6, 0 )
             ENDIF

!
!--          ATTENTION: in case of changes to the following statement please
!--          also check the allocate statement in routine read_var_list
             ALLOCATE( lad(0:nz+1),pt_init(0:nz+1), q_init(0:nz+1), &
                       sa_init(0:nz+1), ug(0:nz+1), u_init(0:nz+1), &
                       v_init(0:nz+1), vg(0:nz+1), &
                       hom(0:nz+1,2,pr_palm+max_pr_user,0:statistic_regions), &
                       hom_sum(0:nz+1,pr_palm+max_pr_user,0:statistic_regions) )

             hom = 0.0

          ENDIF

!
!--       NAMELIST-file is not needed anymore
          CALL close_file( 11 )

       ENDIF
#if defined( __parallel ) && ! ( __check )
!
!--    Keep the I/O groups in lockstep: the next group may only start reading
!--    after the current one has finished.
       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
#endif
    ENDDO

    RETURN

!
!-- Error branches of the ENVPAR OPEN/READ above; message level 0 only warns,
!-- so the run continues with default values for the envpar variables.
 30 message_string = 'local file ENVPAR not found' // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0276', 0, 1, 0, 6, 0 )

    RETURN

 31 message_string = 'errors in local file ENVPAR' // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0277', 0, 1, 0, 6, 0 )

    RETURN

 32 message_string = 'no envpar-NAMELIST found in local file ENVPAR' // &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0278', 0, 1, 0, 6, 0 )

 END SUBROUTINE parin