!> @file parin.f90
!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2015 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: parin.f90 1805 2016-04-05 16:32:34Z knoop $
!
! 1804 2016-04-05 16:30:18Z maronga
! Removed code for parameter file check (__check)
!
! 1783 2016-03-06 18:36:17Z raasch
! +netcdf_deflate in d3par, netcdf module and variable names changed
!
! 1764 2016-02-28 12:45:19Z raasch
! cpp-statements for nesting removed, explicit settings of boundary conditions
! in nest domains,
! bugfix: npex/npey message moved from inipar to d3par
! bugfix: check of lateral boundary conditions moved from check_parameters to
! here, because they are already used in init_pegrid and init_grid
!
! 1762 2016-02-25 12:31:13Z hellstea
! Introduction of nested domain feature
!
! 1691 2015-10-26 16:17:44Z maronga
! Added parameter most_method. Renamed prandtl_layer to constant_flux_layer.
!
! 1682 2015-10-07 23:56:08Z knoop
! Code annotations made doxygen readable
!
! 1560 2015-03-06 10:48:54Z keck
! +recycling_yshift
!
! 1496 2014-12-02 17:25:50Z maronga
! Renamed "radiation" -> "cloud_top_radiation"
!
! 1484 2014-10-21 10:53:05Z kanani
! Changes due to new module structure of the plant canopy model:
! canopy-model related parameters moved to new package canopy_par in
! subroutine package_parin
!
! 1429 2014-07-15 12:53:45Z knoop
! +ensemble_member_nr to prepare the random_generator for ensemble runs
!
! 1402 2014-05-09 14:25:13Z raasch
! location messages modified, batch_job included in envpar-NAMELIST
!
! 1384 2014-05-02 14:31:06Z raasch
! location messages added
!
! 1365 2014-04-22 15:03:56Z boeske
! Usage of large scale forcing enabled:
! +use_subsidence_tendencies
!
! 1361 2014-04-16 15:17:48Z hoffmann
! +call_microphysics_at_all_substeps
!
! 1359 2014-04-11 17:15:14Z hoffmann
! REAL constants provided with KIND-attribute
!
! 1353 2014-04-08 15:21:23Z heinze
! REAL constants provided with KIND-attribute
!
! 1327 2014-03-21 11:00:16Z raasch
! -data_output_format, do3d_compress, do3d_comp_prec
!
! 1320 2014-03-20 08:40:49Z raasch
! ONLY-attribute added to USE-statements,
! kind-parameters added to all INTEGER and REAL declaration statements,
! kinds are defined in new module kinds,
! old module precision_kind is removed,
! revision history before 2012 removed,
! comment fields (!:) to be used for variable explanations added to
! all variable declaration statements
!
! 1318 2014-03-17 13:35:16Z raasch
! +cpu_log_barrierwait in d3par
!
! 1301 2014-03-06 13:29:46Z heinze
! +large_scale_subsidence
!
! 1241 2013-10-30 11:36:58Z heinze
! +nudging
! +large_scale_forcing
!
! 1216 2013-08-26 09:31:42Z raasch
! +transpose_compute_overlap in inipar
!
! 1195 2013-07-01 12:27:57Z heinze
! Bugfix: allocate ref_state
!
! 1179 2013-06-14 05:57:58Z raasch
! +reference_state in inipar
!
! 1159 2013-05-21 11:58:22Z fricke
! +use_cmax
!
! 1128 2013-04-12 06:19:32Z raasch
! +background_communication in inipar
!
! 1115 2013-03-26 18:16:16Z hoffmann
! unused variables removed
!
! 1092 2013-02-02 11:24:22Z raasch
! unused variables removed
!
! 1065 2012-11-22 17:42:36Z hoffmann
! +nc, c_sedimentation, limiter_sedimentation, turbulence
! -mu_constant, mu_constant_value
!
! 1053 2012-11-13 17:11:03Z hoffmann
! necessary expansions according to the two new prognostic equations (nr, qr)
! of the two-moment cloud physics scheme and steering parameters:
! +*_init, *_surface, *_surface_initial_change, *_vertical_gradient,
! +*_vertical_gradient_level, surface_waterflux_*,
! +cloud_scheme, drizzle, mu_constant, mu_constant_value, ventilation_effect
!
! 1036 2012-10-22 13:43:42Z raasch
! code put under GPL (PALM 3.9)
!
! 1015 2012-09-27 09:23:24Z raasch
! -adjust_mixing_length
!
! 1003 2012-09-14 14:35:53Z raasch
! -grid_matching
!
! 1001 2012-09-13 14:08:46Z raasch
! -cut_spline_overshoot, long_filter_factor, overshoot_limit_*, ups_limit_*
!
! 996 2012-09-07 10:41:47Z raasch
! -use_prior_plot1d_parameters
!
! 978 2012-08-09 08:28:32Z fricke
! -km_damp_max, outflow_damping_width
! +pt_damping_factor, pt_damping_width
! +z0h_factor
!
! 964 2012-07-26 09:14:24Z raasch
! -cross_normalized_x, cross_normalized_y, cross_xtext, z_max_do1d,
! z_max_do1d_normalized
!
! 940 2012-07-09 14:31:00Z raasch
! +neutral in inipar
!
! 927 2012-06-06 19:15:04Z raasch
! +masking_method in inipar
!
! 824 2012-02-17 09:09:57Z raasch
! +curvature_solution_effects in inipar
!
! 809 2012-01-30 13:32:58Z maronga
! Bugfix: replaced .AND. and .NOT. with && and ! in the preprocessor directives
!
! 807 2012-01-25 11:53:51Z maronga
! New cpp directive "__check" implemented which is used by check_namelist_files
!
! Revision 1.1  1997/07/24 11:22:50  raasch
! Initial revision
!
!
! Description:
! ------------
!> This subroutine reads variables controlling the run from the NAMELIST files
!------------------------------------------------------------------------------!
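!
!-- Note: three NAMELIST groups are processed below: envpar (environment
!-- parameters, generated by mrun), inipar (initialization parameters, which
!-- must be provided) and d3par (runtime parameters, which may be omitted)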
 SUBROUTINE parin

    USE arrays_3d,                                                             &
        ONLY:  pt_init, q_init, ref_state, sa_init, ug, u_init, v_init,        &
               vg

    USE cloud_parameters,                                                      &
        ONLY:  c_sedimentation, curvature_solution_effects,                    &
               limiter_sedimentation, nc_const, ventilation_effect

    USE control_parameters

    USE cpulog,                                                                &
        ONLY:  cpu_log_barrierwait

    USE dvrp_variables,                                                        &
        ONLY:  local_dvrserver_running

    USE grid_variables,                                                        &
        ONLY:  dx, dy

    USE indices,                                                               &
        ONLY:  nx, ny, nz

    USE kinds

    USE model_1d,                                                              &
        ONLY:  damp_level_1d, dt_pr_1d, dt_run_control_1d, end_time_1d

    USE pegrid

    USE netcdf_interface,                                                      &
        ONLY:  netcdf_data_format, netcdf_deflate, netcdf_precision

    USE pmc_interface,                                                         &
        ONLY:  nested_run

    USE profil_parameter,                                                      &
        ONLY:  cross_profiles, cross_ts_uymax, cross_ts_uymin,                 &
               profile_columns, profile_rows

    USE progress_bar,                                                          &
        ONLY:  batch_job

    USE statistics,                                                            &
        ONLY:  hom, hom_sum, pr_palm, region, statistic_regions

    IMPLICIT NONE

    INTEGER(iwp) ::  i   !< loop index over the I/O groups


    NAMELIST /inipar/  alpha_surface, background_communication, bc_e_b, bc_lr, &
                       bc_ns, bc_p_b, bc_p_t, bc_pt_b, bc_pt_t, bc_q_b,        &
                       bc_q_t, bc_s_b, bc_s_t, bc_sa_t, bc_uv_b, bc_uv_t,      &
                       bottom_salinityflux, building_height, building_length_x,&
                       building_length_y, building_wall_left,                  &
                       building_wall_south, call_psolver_at_all_substeps,      &
                       call_microphysics_at_all_substeps, canyon_height,       &
                       canyon_width_x, canyon_width_y, canyon_wall_left,       &
                       canyon_wall_south, c_sedimentation, cfl_factor,         &
                       cloud_droplets, cloud_physics, cloud_scheme,            &
                       cloud_top_radiation, collective_wait,                   &
                       conserve_volume_flow, conserve_volume_flow_mode,        &
                       constant_flux_layer, coupling_start_time,               &
                       curvature_solution_effects, cycle_mg, damp_level_1d,    &
                       dissipation_1d, dp_external, dp_level_b, dp_smooth,     &
                       dpdxy, drizzle, dt, dt_pr_1d, dt_run_control_1d, dx, dy,&
                       dz, dz_max, dz_stretch_factor, dz_stretch_level,        &
                       end_time_1d, ensemble_member_nr, e_init, e_min,         &
                       fft_method, galilei_transformation, humidity,           &
                       inflow_damping_height, inflow_damping_width,            &
                       inflow_disturbance_begin, inflow_disturbance_end,       &
                       initializing_actions, km_constant,                      &
                       large_scale_forcing, large_scale_subsidence,            &
                       limiter_sedimentation, loop_optimization,               &
                       masking_method, mg_cycles, mg_switch_to_pe0_level,      &
                       mixing_length_1d, momentum_advec, most_method, nc_const,&
                       netcdf_precision, neutral, ngsrb, nsor, nsor_ini,       &
                       nudging, nx, ny, nz, ocean, omega, omega_sor,           &
                       passive_scalar, phi, prandtl_number, precipitation,     &
                       psolver, pt_damping_factor, pt_damping_width,           &
                       pt_reference, pt_surface, pt_surface_initial_change,    &
                       pt_vertical_gradient, pt_vertical_gradient_level,       &
                       q_surface, q_surface_initial_change,                    &
                       q_vertical_gradient, q_vertical_gradient_level,         &
                       random_generator, random_heatflux,                      &
                       rayleigh_damping_factor, rayleigh_damping_height,       &
                       recycling_width, recycling_yshift, reference_state,     &
                       residual_limit, roughness_length, sa_surface,           &
                       sa_vertical_gradient, sa_vertical_gradient_level,       &
                       scalar_advec, scalar_rayleigh_damping,                  &
                       statistic_regions, subs_vertical_gradient,              &
                       subs_vertical_gradient_level, surface_heatflux,         &
                       surface_pressure, surface_scalarflux, surface_waterflux,&
                       s_surface, s_surface_initial_change,                    &
                       s_vertical_gradient, s_vertical_gradient_level,         &
                       timestep_scheme, topography, topography_grid_convention,&
                       top_heatflux, top_momentumflux_u, top_momentumflux_v,   &
                       top_salinityflux, transpose_compute_overlap,            &
                       turbulence, turbulent_inflow,                           &
                       use_subsidence_tendencies, ug_surface,                  &
                       ug_vertical_gradient, ug_vertical_gradient_level,       &
                       use_surface_fluxes, use_cmax, use_top_fluxes,           &
                       use_ug_for_galilei_tr, use_upstream_for_tke, uv_heights,&
                       u_bulk, u_profile, vg_surface, vg_vertical_gradient,    &
                       vg_vertical_gradient_level, v_bulk, v_profile,          &
                       ventilation_effect, wall_adjustment, wall_heatflux,     &
                       wall_humidityflux, wall_scalarflux, zeta_max, zeta_min, &
                       z0h_factor

    NAMELIST /d3par/   averaging_interval, averaging_interval_pr,              &
                       cpu_log_barrierwait, create_disturbances,               &
                       cross_profiles, cross_ts_uymax, cross_ts_uymin,         &
                       data_output, data_output_masks, data_output_pr,         &
                       data_output_2d_on_each_pe, disturbance_amplitude,       &
                       disturbance_energy_limit, disturbance_level_b,          &
                       disturbance_level_t, do2d_at_begin, do3d_at_begin,      &
                       dt, dt_averaging_input, dt_averaging_input_pr,          &
                       dt_coupling, dt_data_output, dt_data_output_av,         &
                       dt_disturb, dt_domask, dt_dopr, dt_dopr_listing,        &
                       dt_dots, dt_do2d_xy, dt_do2d_xz, dt_do2d_yz, dt_do3d,   &
                       dt_max, dt_restart, dt_run_control, end_time,           &
                       force_print_header, mask_scale_x, mask_scale_y,         &
                       mask_scale_z, mask_x, mask_y, mask_z, mask_x_loop,      &
                       mask_y_loop, mask_z_loop, netcdf_data_format,           &
                       netcdf_deflate, normalizing_region, npex, npey,         &
                       nz_do3d, precipitation_amount_interval,                 &
                       profile_columns, profile_rows, restart_time,            &
                       section_xy, section_xz, section_yz,                     &
                       skip_time_data_output, skip_time_data_output_av,        &
                       skip_time_dopr, skip_time_do2d_xy, skip_time_do2d_xz,   &
                       skip_time_do2d_yz, skip_time_do3d, skip_time_domask,    &
                       synchronous_exchange, termination_time_needed,          &
                       z_max_do2d

    NAMELIST /envpar/  batch_job, host, local_dvrserver_running,               &
                       maximum_cpu_time_allowed, maximum_parallel_io_streams,  &
                       revision, return_addres, return_username,               &
                       run_identifier, tasks_per_node, write_binary

!
!-- First read values of environment variables (this NAMELIST file is
!-- generated by mrun)
    CALL location_message( 'reading environment parameters from ENVPAR',       &
                           .FALSE. )

    OPEN ( 90, FILE='ENVPAR', STATUS='OLD', FORM='FORMATTED', ERR=30 )
    READ ( 90, envpar, ERR=31, END=32 )
    CLOSE ( 90 )

    CALL location_message( 'finished', .TRUE. )

!
!-- Calculate the number of groups into which parallel I/O is split.
!-- The default for files which are opened by all PEs (or where each PE opens
!-- its own independent file) is that all PEs do input/output in parallel at
!-- the same time. This might cause performance or even more severe problems
!-- depending on the configuration of the underlying file system.
!-- First, set the default:
    IF ( maximum_parallel_io_streams == -1  .OR.                               &
         maximum_parallel_io_streams > numprocs )  THEN
       maximum_parallel_io_streams = numprocs
    ENDIF

!
!-- Now calculate the number of io_blocks and the io_group to which the
!-- respective PE belongs. I/O of the groups is done in serial, but in
!-- parallel for all PEs belonging to the same group.
!-- These settings are repeated in init_pegrid for the communicator comm2d,
!-- which is not available here.
    io_blocks = numprocs / maximum_parallel_io_streams
    io_group  = MOD( myid+1, io_blocks )

    CALL location_message( 'reading NAMELIST parameters from PARIN',           &
                           .FALSE. )
!
!-- Data is read in parallel by groups of PEs
    DO  i = 0, io_blocks-1
       IF ( i == io_group )  THEN

!
!--       Open the NAMELIST-file which is sent with this job
          CALL check_open( 11 )

!
!--       Read the control parameters for initialization.
!--       The namelist "inipar" must be provided in the NAMELIST-file.
          READ ( 11, inipar, ERR=10, END=11 )

          GOTO 12

 10       message_string = 'errors in \$inipar &or no \$inipar-namelist ' //   &
                           'found (CRAY-machines only)'
          CALL message( 'parin', 'PA0271', 1, 2, 0, 6, 0 )

 11       message_string = 'no \$inipar-namelist found'
          CALL message( 'parin', 'PA0272', 1, 2, 0, 6, 0 )
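!
!--       For orientation, a minimal &inipar group in the PARIN file might
!--       look as sketched below (the values are illustrative only, not
!--       recommended settings):
!--
!--          &inipar  nx = 39, ny = 39, nz = 40,
!--                   dx = 50.0, dy = 50.0, dz = 50.0,
!--                   initializing_actions = 'set_constant_profiles'  /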
!
!--       If required, read control parameters from restart file (produced by
!--       a prior run). All PEs are reading from the file created by PE0 (see
!--       check_open)
 12       IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
             CALL read_var_list
!
!--          The restart file will be reopened when reading the subdomain data
             CALL close_file( 13 )

!
!--          Increment the run count
             runnr = runnr + 1
          ENDIF

!
!--       In case of nested runs, explicitly set nesting boundary conditions
!--       except for the root domain. This will overwrite the user settings.
          IF ( nest_domain )  THEN
             bc_lr   = 'nested'
             bc_ns   = 'nested'
             bc_uv_t = 'nested'
             bc_pt_t = 'nested'
             bc_p_t  = 'neumann'
          ENDIF
!
!--       Check validity of lateral boundary conditions. This has to be done
!--       here because they are already used in init_pegrid and init_grid and
!--       therefore cannot be checked in check_parameters
          IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND. &
               bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
             message_string = 'unknown boundary condition: bc_lr = "' //       &
                              TRIM( bc_lr ) // '"'
             CALL message( 'check_parameters', 'PA0049', 1, 2, 0, 6, 0 )
          ENDIF
          IF ( bc_ns /= 'cyclic'  .AND.  bc_ns /= 'dirichlet/radiation'  .AND. &
               bc_ns /= 'radiation/dirichlet'  .AND.  bc_ns /= 'nested' )  THEN
             message_string = 'unknown boundary condition: bc_ns = "' //       &
                              TRIM( bc_ns ) // '"'
             CALL message( 'check_parameters', 'PA0050', 1, 2, 0, 6, 0 )
          ENDIF

!
!--       Set internal variables used for speed optimization in if clauses
          IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
          IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
          IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.
          IF ( bc_ns /= 'cyclic' )               bc_ns_cyc    = .FALSE.
          IF ( bc_ns == 'dirichlet/radiation' )  bc_ns_dirrad = .TRUE.
          IF ( bc_ns == 'radiation/dirichlet' )  bc_ns_raddir = .TRUE.

!
!--       Definition of names of areas used for computing statistics. They
!--       must be defined at this place, because they are allowed to be
!--       redefined by the user in user_parin.
          region = 'total domain'

!
!--       Read runtime parameters given by the user for this run (namelist
!--       "d3par"). The namelist "d3par" can be omitted. In that case, default
!--       values are used for the parameters.
          READ ( 11, d3par, END=20 )

          IF ( nested_run )  THEN
!
!--          In nested domains, npex or npey cannot be given in the d3par-
!--          NAMELIST, because the PE-grids are always defined in the
!--          nestpar-NAMELIST. Settings will be ignored.
             IF ( ( npex /= -1 ) .OR. ( npey /= -1 ) )  THEN
                message_string = 'npex or npey can not be given in \$d3par ' //&
                                 'for nested runs & they will be ignored'
                CALL message( 'parin', 'PA0352', 0, 1, 0, 6, 0 )
             ENDIF
          ENDIF

!
!--       Read control parameters for optionally used model software packages
 20       CALL package_parin

!
!--       Read user-defined variables
          CALL user_parin

!
!--       Check in case of initial run, if the grid point numbers are well
!--       defined and allocate some arrays which are already needed in
!--       init_pegrid or check_parameters. During restart jobs, these arrays
!--       will be allocated in read_var_list. All other arrays are allocated
!--       in init_3d_model.
          IF ( TRIM( initializing_actions ) /= 'read_restart_data' )  THEN

             IF ( nx <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given',    &
                                           ' for nx: nx=', nx
                CALL message( 'parin', 'PA0273', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( ny <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given',    &
                                           ' for ny: ny=', ny
                CALL message( 'parin', 'PA0274', 1, 2, 0, 6, 0 )
             ENDIF
             IF ( nz <= 0 )  THEN
                WRITE( message_string, * ) 'no value or wrong value given',    &
                                           ' for nz: nz=', nz
                CALL message( 'parin', 'PA0275', 1, 2, 0, 6, 0 )
             ENDIF

!
!--          ATTENTION: in case of changes to the following statement please
!--          also check the allocate statement in routine read_var_list
             ALLOCATE( pt_init(0:nz+1), q_init(0:nz+1),                        &
                       ref_state(0:nz+1), sa_init(0:nz+1), ug(0:nz+1),         &
                       u_init(0:nz+1), v_init(0:nz+1), vg(0:nz+1),             &
                       hom(0:nz+1,2,pr_palm+max_pr_user,0:statistic_regions),  &
                       hom_sum(0:nz+1,pr_palm+max_pr_user,0:statistic_regions) )

             hom = 0.0_wp

          ENDIF

!
!--       NAMELIST-file is not needed anymore
          CALL close_file( 11 )

       ENDIF
#if defined( __parallel )
       CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
#endif
    ENDDO

    CALL location_message( 'finished', .TRUE. )

    RETURN

 30 message_string = 'local file ENVPAR not found' //                          &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0276', 0, 1, 0, 6, 0 )
    RETURN

 31 message_string = 'errors in local file ENVPAR' //                          &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0277', 0, 1, 0, 6, 0 )
    RETURN

 32 message_string = 'no envpar-NAMELIST found in local file ENVPAR' //        &
                     '&some variables for steering may not be properly set'
    CALL message( 'parin', 'PA0278', 0, 1, 0, 6, 0 )

 END SUBROUTINE parin