Changeset 1764 for palm/trunk/SOURCE
- Timestamp: Feb 28, 2016 12:45:19 PM
- Location: palm/trunk/SOURCE
- Files: 18 edited
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
- … marks skipped unchanged lines
palm/trunk/SOURCE/Makefile
r1763 → r1764

# Current revisions:
# ------------------
+ # update dependency of check_parameters, header, local_stop,
+ # pmc_handle_communicator
#
# Former revisions:
…
check_for_restart.o: modules.o mod_kinds.o
check_open.o: modules.o mod_kinds.o mod_particle_attributes.o
- check_parameters.o: modules.o mod_kinds.o subsidence.o land_surface_model.o\
-                     plant_canopy_model.o radiation_model.o
+ check_parameters.o: modules.o mod_kinds.o land_surface_model.o\
+                     plant_canopy_model.o pmc_interface.o radiation_model.o subsidence.o
close_file.o: modules.o mod_kinds.o
compute_vpt.o: modules.o mod_kinds.o
…
global_min_max.o: modules.o mod_kinds.o
- header.o: modules.o cpulog.o mod_kinds.o land_surface_model.o\
-           plant_canopy_model.o radiation_model.o subsidence.o
+ header.o: modules.o cpulog.o mod_kinds.o land_surface_model.o\
+           plant_canopy_model.o pmc_interface.o radiation_model.o subsidence.o
impact_of_latent_heat.o: modules.o mod_kinds.o
inflow_turbulence.o: modules.o cpulog.o mod_kinds.o
…
local_flush.o: mod_kinds.o
local_getenv.o: modules.o mod_kinds.o
- local_stop.o: modules.o mod_kinds.o
+ local_stop.o: modules.o mod_kinds.o pmc_interface.o
local_tremain.o: modules.o cpulog.o mod_kinds.o
local_tremain_ini.o: modules.o cpulog.o mod_kinds.o
…
lpm_write_restart_file.o: modules.o mod_kinds.o mod_particle_attributes.o
ls_forcing.o: modules.o cpulog.o mod_kinds.o
- message.o: modules.o mod_kinds.o
+ message.o: modules.o mod_kinds.o pmc_interface.o
microphysics.o: modules.o cpulog.o mod_kinds.o
modules.o: modules.f90 mod_kinds.o
…
palm.o: modules.o cpulog.o ls_forcing.o mod_kinds.o nudging.o\
        pmc_interface.o surface_layer_fluxes.o
- parin.o: modules.o cpulog.o mod_kinds.o progress_bar.o
+ parin.o: modules.o cpulog.o mod_kinds.o pmc_interface.o progress_bar.o
plant_canopy_model.o: modules.o mod_kinds.o
pmc_interface.o: modules.o mod_kinds.o pmc_client.o pmc_general.o\
                 pmc_handle_communicator.o pmc_mpi_wrapper.o pmc_server.o
- pmc_client.o: pmc_general.o pmc_handle_communicator.o pmc_mpi_wrapper.o
- pmc_handle_communicator.o: pmc_general.o
+ pmc_client.o: mod_kinds.o pmc_general.o pmc_handle_communicator.o\
+               pmc_mpi_wrapper.o
+ pmc_general.o: mod_kinds.o
+ pmc_handle_communicator.o: modules.o mod_kinds.o pmc_general.o
pmc_mpi_wrapper.o: pmc_handle_communicator.o
pmc_server.o: pmc_general.o pmc_handle_communicator.o pmc_mpi_wrapper.o
palm/trunk/SOURCE/boundary_conds.f90
r1763 → r1764

! Current revisions:
! -----------------
+ ! index bug for u_p at left outflow removed
!
! Former revisions:
…
!
!-- Top boundary at the outflow
    IF ( ibc_uv_t == 0 )  THEN
-      u_p(nzt+1,:,-1) = u_init(nzt+1)
+      u_p(nzt+1,:,0)  = u_init(nzt+1)
       v_p(nzt+1,:,-1) = v_init(nzt+1)
    ELSE
-      u_p(nzt+1,:,-1) = u_p(nzt,:,-1)
+      u_p(nzt+1,:,0)  = u_p(nzt,:,0)
       v_p(nzt+1,:,-1) = v_p(nzt,:,-1)
    ENDIF
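As an aside, the staggered-grid convention behind this one-character fix can be seen in a tiny stand-alone sketch; all array bounds and values below are made up, only the index pattern mirrors the corrected lines. On a C-staggered grid the wall-normal velocity u has its outermost prognostic point at i = 0 at a left open boundary, while v still lives in the ghost column i = -1.

PROGRAM staggered_boundary_sketch
!-- Minimal sketch, assuming a C-staggered grid with one ghost layer.
!-- nzt, ny, nx and the initial profiles are illustrative only.
   IMPLICIT NONE
   INTEGER, PARAMETER ::  nzt = 8, ny = 4, nx = 4
   REAL, DIMENSION(0:nzt+1,-1:ny+1,-1:nx+1) ::  u_p = 0.0, v_p = 0.0
   REAL, DIMENSION(0:nzt+1) ::  u_init = 5.0, v_init = 0.0

!-- Top boundary at a left outflow: u gets its value in column i = 0,
!-- v in the ghost column i = -1 (cf. the corrected lines above)
   u_p(nzt+1,:,0)  = u_init(nzt+1)
   v_p(nzt+1,:,-1) = v_init(nzt+1)

   PRINT*, 'u_p top value at i=0: ', u_p(nzt+1,0,0), '  v_p at i=-1: ', v_p(nzt+1,0,-1)
END PROGRAM staggered_boundary_sketch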
palm/trunk/SOURCE/check_parameters.f90
r1763 → r1764

! Current revisions:
! -----------------
+ ! output of nest id in run description header,
+ ! bugfix: check of validity of lateral boundary conditions moved to parin
!
! Former revisions:
…
    USE pegrid
    USE plant_canopy_model_mod
+   USE pmc_interface,                                                         &
+       ONLY:  cpl_id, nested_run
    USE profil_parameter
    USE radiation_model_mod
…
    CHARACTER (LEN=8)   ::  date              !<
    CHARACTER (LEN=10)  ::  time              !<
+   CHARACTER (LEN=10)  ::  ensemble_string   !<
+   CHARACTER (LEN=15)  ::  nest_string       !<
    CHARACTER (LEN=40)  ::  coupling_string   !<
    CHARACTER (LEN=100) ::  action            !<
…
    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
       coupling_string = ' coupled (ocean)'
    ENDIF
-
    IF ( ensemble_member_nr /= 0 )  THEN
-      WRITE ( run_description_header,                                         &
-              '(A,2X,A,2X,A,A,A,I2.2,A,2X,A,I2.2,2X,A,A,2X,A,1X,A)' )         &
-             TRIM( version ), TRIM( revision ), 'run: ',                      &
-             TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),     &
-             'en-no: ', ensemble_member_nr,'host: ', TRIM( host ),            &
-             run_date, run_time
+      WRITE( ensemble_string, '(2X,A,I2.2)' )  'en-no: ', ensemble_member_nr
    ELSE
-      WRITE ( run_description_header,                                         &
-              '(A,2X,A,2X,A,A,A,I2.2,A,2X,A,A,2X,A,1X,A)' )                   &
-             TRIM( version ), TRIM( revision ), 'run: ',                      &
-             TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),     &
-             'host: ', TRIM( host ), run_date, run_time
-   ENDIF
+      ensemble_string = ''
+   ENDIF
+   IF ( nested_run )  THEN
+      WRITE( nest_string, '(2X,A,I2.2)' )  'nest-id: ', cpl_id
+   ELSE
+      nest_string = ''
+   ENDIF
+
+   WRITE ( run_description_header,                                            &
+           '(A,2X,A,2X,A,A,A,I2.2,A,A,A,2X,A,A,2X,A,1X,A)' )                  &
+         TRIM( version ), TRIM( revision ), 'run: ',                          &
+         TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),         &
+         TRIM( nest_string ), TRIM( ensemble_string), 'host: ', TRIM( host ), &
+         run_date, run_time
+
!
!-- Check the general loop optimization method
…
!
!-- Check boundary conditions and set internal variables:
-!-- Lateral boundary conditions
-   IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND.       &
-        bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
-      message_string = 'unknown boundary condition: bc_lr = "' //             &
-                       TRIM( bc_lr ) // '"'
-      CALL message( 'check_parameters', 'PA0049', 1, 2, 0, 6, 0 )
-   ENDIF
-   IF ( bc_ns /= 'cyclic'  .AND.  bc_ns /= 'dirichlet/radiation'  .AND.       &
-        bc_ns /= 'radiation/dirichlet'  .AND.  bc_ns /= 'nested' )  THEN
-      message_string = 'unknown boundary condition: bc_ns = "' //             &
-                       TRIM( bc_ns ) // '"'
-      CALL message( 'check_parameters', 'PA0050', 1, 2, 0, 6, 0 )
-   ENDIF
-
-!
-!-- Internal variables used for speed optimization in if clauses
-   IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
-   IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
-   IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.
-   IF ( bc_ns /= 'cyclic' )               bc_ns_cyc    = .FALSE.
-   IF ( bc_ns == 'dirichlet/radiation' )  bc_ns_dirrad = .TRUE.
-   IF ( bc_ns == 'radiation/dirichlet' )  bc_ns_raddir = .TRUE.
-
+!-- Attention: the lateral boundary conditions have been already checked in
+!-- parin
!
!-- Non-cyclic lateral boundaries require the multigrid method and Piascek-
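A minimal sketch of the string-assembly pattern introduced here; variable names follow the diff, but the values, lengths and the final format are placeholders. The optional ensemble and nest fragments are pre-formatted into their own strings, which stay empty when the corresponding feature is inactive, so one WRITE covers all cases.

PROGRAM header_string_sketch
   IMPLICIT NONE
   CHARACTER(LEN=10)  ::  ensemble_string
   CHARACTER(LEN=15)  ::  nest_string
   CHARACTER(LEN=100) ::  run_description_header
   INTEGER ::  ensemble_member_nr = 0, cpl_id = 2, runnr = 1
   LOGICAL ::  nested_run = .TRUE.

!-- Optional fragments are either formatted or left empty
   IF ( ensemble_member_nr /= 0 )  THEN
      WRITE( ensemble_string, '(2X,A,I2.2)' )  'en-no: ', ensemble_member_nr
   ELSE
      ensemble_string = ''
   ENDIF
   IF ( nested_run )  THEN
      WRITE( nest_string, '(2X,A,I2.2)' )  'nest-id: ', cpl_id
   ELSE
      nest_string = ''
   ENDIF

!-- One WRITE covers all combinations, because empty fragments vanish via TRIM
   WRITE( run_description_header, '(A,I2.2,A,A)' )                            &
        'run: example.', runnr, TRIM( nest_string ), TRIM( ensemble_string )
   PRINT*, TRIM( run_description_header )
END PROGRAM header_string_sketch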
palm/trunk/SOURCE/header.f90
r1698 → r1764

! Current revisions:
! -----------------
+ ! output of nesting informations
!
! Former revisions:
…
! Description:
! ------------
-!> Writing a header with all important information about the actual run.
+!> Writing a header with all important information about the current run.
!> This subroutine is called three times, two times at the beginning
!> (writing information on files RUN_CONTROL and HEADER) and one time at the
…
        plant_canopy

+   USE pmc_interface,                                                         &
+       ONLY:  cpl_id, cpl_parent_id, cpl_name, lower_left_coord_x,            &
+              lower_left_coord_y, nested_run, nesting_mode
+
    USE radiation_model_mod,                                                   &
        ONLY:  albedo, albedo_type, albedo_type_name, constant_albedo,         &
…
!
!-- Determine kind of model run
    IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
-      run_classification = '3D - restart run'
+      run_classification = 'restart run'
    ELSEIF ( TRIM( initializing_actions ) == 'cyclic_fill' )  THEN
-      run_classification = '3D - run with cyclic fill of 3D - prerun data'
+      run_classification = 'run with cyclic fill of 3D - prerun data'
    ELSEIF ( INDEX( initializing_actions, 'set_constant_profiles' ) /= 0 )  THEN
-      run_classification = '3D - run without 1D - prerun'
+      run_classification = 'run without 1D - prerun'
    ELSEIF ( INDEX( initializing_actions, 'set_1d-model_profiles' ) /= 0 )  THEN
-      run_classification = '3D - run with 1D - prerun'
+      run_classification = 'run with 1D - prerun'
    ELSEIF ( INDEX( initializing_actions, 'by_user' ) /=0 )  THEN
-      run_classification = '3D - run initialized by user'
+      run_classification = 'run initialized by user'
    ELSE
       message_string = ' unknown action(s): ' // TRIM( initializing_actions )
       CALL message( 'header', 'PA0191', 0, 0, 0, 6, 0 )
    ENDIF
+   IF ( nested_run )  run_classification = 'nested ' // run_classification
    IF ( ocean )  THEN
       run_classification = 'ocean - ' // run_classification
…
    IF ( num_acc_per_node /= 0 )  WRITE ( io, 120 )  num_acc_per_node
 #endif
+
+!
+!-- Nesting informations
+   IF ( nested_run )  THEN
+      WRITE ( io, 600 )  cpl_id, TRIM( cpl_name ), cpl_parent_id,             &
+                         nesting_mode, lower_left_coord_x, lower_left_coord_y
+   ENDIF
    WRITE ( io, 99 )
…
513 FORMAT (' --> Scalar advection via Wicker-Skamarock-Scheme 5th order ' //  &
            '+ monotonic adjustment')
+600 FORMAT (/' Nesting informations:'/                                        &
+            ' Nest id / name: ',I2.2,' / ',A,' (parent id: ',I2.2,')'/        &
+            ' Nesting mode: ',A/                                              &
+            ' Lower left corner coordinates: ','x = ',F8.2,' m, y = ',F8.2,' m'/)

 END SUBROUTINE header
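For reference, a stripped-down sketch of the labelled-FORMAT output pattern used for the new nesting block; the output unit, the values and the exact text layout are placeholders, not the PALM originals.

PROGRAM nesting_header_sketch
   IMPLICIT NONE
   INTEGER          ::  cpl_id = 2, cpl_parent_id = 1
   CHARACTER(LEN=7) ::  nesting_mode = 'two-way'
   REAL             ::  llx = 1280.0, lly = 640.0

!-- One formatted WRITE prints the whole nesting block at once
   WRITE ( *, 600 )  cpl_id, 'fine', cpl_parent_id, nesting_mode, llx, lly

600 FORMAT (/' Nesting information:'/                                          &
            ' Nest id / name: ',I2.2,' / ',A,' (parent id: ',I2.2,')'/         &
            ' Nesting mode: ',A/                                               &
            ' Lower left corner coordinates: x = ',F8.2,' m, y = ',F8.2,' m'/)
END PROGRAM nesting_header_sketch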
palm/trunk/SOURCE/init_3d_model.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! bugfix: increase size of volume_flow_area_l and volume_flow_initial_l by 1
!
! Former revisions:
…
    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  ngp_2dh_s_inner_l  !<

-   REAL(wp), DIMENSION(1:2) ::  volume_flow_area_l     !<
-   REAL(wp), DIMENSION(1:2) ::  volume_flow_initial_l  !<
+   REAL(wp), DIMENSION(1:3) ::  volume_flow_area_l     !<
+   REAL(wp), DIMENSION(1:3) ::  volume_flow_initial_l  !<

    REAL(wp), DIMENSION(:), ALLOCATABLE ::  mean_surface_level_height_l  !<
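The point of this fix, sketched below with made-up names and values (this is not the PALM code, and the meaning of the third component is an assumption): when per-process partial results are later combined into a global array over all components, the local buffer must have at least as many elements as are combined, so both arrays need the extent 1:3.

PROGRAM volume_flow_size_sketch
   IMPLICIT NONE
   INTEGER, PARAMETER ::  n_components = 3     ! assumed: e.g. two lateral faces plus one more entry
   REAL, DIMENSION(1:n_components) ::  volume_flow_area_l = 0.0
   REAL, DIMENSION(1:n_components) ::  volume_flow_area   = 0.0

!-- Fill the local contributions (placeholder values)
   volume_flow_area_l(1) = 10.0
   volume_flow_area_l(2) = 20.0
   volume_flow_area_l(3) = 30.0

!-- A global combination (MPI_ALLREDUCE in the real code) touches all
!-- n_components elements of the local array, hence the matching size 1:3
   volume_flow_area(:) = volume_flow_area_l(:)
   PRINT*, 'total area: ', SUM( volume_flow_area )
END PROGRAM volume_flow_size_sketch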
palm/trunk/SOURCE/init_pegrid.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! cpp-statements for nesting removed
!
! Former revisions:
…
    USE pegrid

-#if defined( PMC_ACTIVE )
    USE pmc_interface,                                                         &
-       ONLY:  cpl_npex,cpl_npey
-#endif
+       ONLY:  cpl_npex, cpl_npey, nested_run

    USE transpose_indices,                                                     &
…
    CALL location_message( 'creating virtual PE grids + MPI derived data types', &
                           .FALSE. )
-#if defined( PMC_ACTIVE )
-!
-!-- In case of nested-domain runs, the processor grid is explicitly given
-!-- by the user in the nestpar-NAMELIST
-   pdims(1) = cpl_npex
-   pdims(2) = cpl_npey
-#else
-!
-!-- Determine the processor topology or check it, if prescribed by the user
-   IF ( npex == -1  .AND.  npey == -1 )  THEN
-      … (same topology-determination code as in the new ELSE branch below,
-         at the old indentation level)
-   ENDIF
-#endif
+
+   IF ( nested_run )  THEN
+!
+!--   In case of nested-domain runs, the processor grid is explicitly given
+!--   by the user in the nestpar-NAMELIST
+      pdims(1) = cpl_npex
+      pdims(2) = cpl_npey
+
+   ELSE
+!
+!--   Determine the processor topology or check it, if prescribed by the user
+      IF ( npex == -1  .AND.  npey == -1 )  THEN
+!
+!--      Automatic determination of the topology
+         numproc_sqr = SQRT( REAL( numprocs, KIND=wp ) )
+         pdims(1)    = MAX( numproc_sqr , 1 )
+         DO  WHILE ( MOD( numprocs , pdims(1) ) /= 0 )
+            pdims(1) = pdims(1) - 1
+         ENDDO
+         pdims(2) = numprocs / pdims(1)
+
+      ELSEIF ( npex /= -1  .AND.  npey /= -1 )  THEN
+!
+!--      Prescribed by user. Number of processors on the prescribed topology
+!--      must be equal to the number of PEs available to the job
+         IF ( ( npex * npey ) /= numprocs )  THEN
+            WRITE( message_string, * ) 'number of PEs of the prescribed ',    &
+               'topology (', npex*npey,') does not match & the number of ',   &
+               'PEs available to the job (', numprocs, ')'
+            CALL message( 'init_pegrid', 'PA0221', 1, 2, 0, 6, 0 )
+         ENDIF
+         pdims(1) = npex
+         pdims(2) = npey
+
+      ELSE
+!
+!--      If the processor topology is prescribed by the user, the number of
+!--      PEs must be given in both directions
+         message_string = 'if the processor topology is prescribed by th' //  &
+            'e user& both values of "npex" and "npey" must be given'      //  &
+            ' in the &NAMELIST-parameter file'
+         CALL message( 'init_pegrid', 'PA0222', 1, 2, 0, 6, 0 )
+
+      ENDIF
+
+   ENDIF
…
    ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
       outflow_l = .TRUE.
-#if defined( PMC_ACTIVE )
    ELSEIF ( bc_lr == 'nested' )  THEN
       nest_bound_l = .TRUE.
-#endif
    ENDIF
…
(the same #if defined( PMC_ACTIVE ) / #endif pairs are removed around the
assignments of nest_bound_r, nest_bound_s and nest_bound_n further below)
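The automatic topology determination kept in the ELSE branch can be exercised on its own; the sketch below reproduces the idea with numprocs hard-coded and no MPI involved. Starting from the integer nearest to the square root of numprocs and walking downwards until a divisor is found yields the most nearly square pdims decomposition.

PROGRAM topology_sketch
   IMPLICIT NONE
   INTEGER ::  numprocs = 24        ! placeholder core count
   INTEGER ::  numproc_sqr
   INTEGER, DIMENSION(2) ::  pdims

!-- Most nearly square 2D factorization of numprocs
   numproc_sqr = SQRT( REAL( numprocs ) )
   pdims(1)    = MAX( numproc_sqr, 1 )
   DO  WHILE ( MOD( numprocs, pdims(1) ) /= 0 )
      pdims(1) = pdims(1) - 1
   ENDDO
   pdims(2) = numprocs / pdims(1)

   PRINT*, 'processor grid: ', pdims(1), ' x ', pdims(2)   ! 24 -> 4 x 6
END PROGRAM topology_sketch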
palm/trunk/SOURCE/local_stop.f90
r1683 → r1764

! Current revisions:
! -----------------
+ ! abort with MPI_COMM_WORLD added, nested runs always abort with MPI_ABORT
!
! Former revisions:
…
-   USE pegrid
-
    USE control_parameters,                                                    &
        ONLY:  abort_mode, coupling_mode, coupling_mode_remote, dt_restart,    &
               terminate_run, time_restart

+   USE pegrid
+
+   USE pmc_interface,                                                         &
+       ONLY:  nested_run

 #if defined( __parallel )  &&  ! defined ( __check )
    IF ( coupling_mode == 'uncoupled' )  THEN
-      IF ( abort_mode == 1 )  THEN
-         CALL MPI_FINALIZE( ierr )
-         STOP
-      ELSEIF ( abort_mode == 2 )  THEN
-         CALL MPI_ABORT( comm2d, 9999, ierr )
+      IF ( nested_run )  THEN
+!
+!--      Workaround: If any of the nested model crashes, it aborts the whole
+!--      run with MPI_ABORT, regardless of the reason given by abort_mode
+         CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
+      ELSE
+         IF ( abort_mode == 1 )  THEN
+            CALL MPI_FINALIZE( ierr )
+            STOP
+         ELSEIF ( abort_mode == 2 )  THEN
+            CALL MPI_ABORT( comm2d, 9999, ierr )
+         ELSEIF ( abort_mode == 3 )  THEN
+            CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
+         ENDIF
       ENDIF
    ELSE
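A reduced sketch of the abort logic added here, runnable as a stand-alone MPI program; the error code 9999 and the abort_mode convention are taken from the diff, everything else is illustrative. A nested member must not call MPI_FINALIZE on its own, because the remaining members of MPI_COMM_WORLD would then block, so MPI_ABORT on the world communicator is used instead.

PROGRAM local_stop_sketch
   USE MPI
   IMPLICIT NONE
   INTEGER ::  ierr
   INTEGER ::  abort_mode = 1
   LOGICAL ::  nested_run = .FALSE.     ! set .TRUE. to test the nested branch

   CALL MPI_INIT( ierr )

   IF ( nested_run )  THEN
!--   Any nested member takes the whole coupled job down
      CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
   ELSE
      IF ( abort_mode == 1 )  THEN
         CALL MPI_FINALIZE( ierr )
         STOP
      ELSE
         CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
      ENDIF
   ENDIF
END PROGRAM local_stop_sketch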
palm/trunk/SOURCE/message.f90
r1683 → r1764

! Current revisions:
! -----------------
+ ! nest id added to header string, add linefeed to stdout to get messages better
+ ! seperated from the location messages,
+ ! in case of nested runs, location messages are given only by the root domain
!
! Former revisions:
…
!> Meaning of formal parameters:
!> requested_action: 0 - continue, 1 - abort by stop, 2 - abort by mpi_abort
+!>                   3 - abort by mpi_abort using MPI_COMM_WORLD
!> message_level: 0 - informative, 1 - warning, 2 - error
!> output_on_pe: -1 - all, else - output on specified PE
…
               message_level, output_on_pe, file_id, flush )

    USE control_parameters,                                                    &
        ONLY:  abort_mode, message_string
…
    USE pegrid

+   USE pmc_interface,                                                         &
+       ONLY:  cpl_id, nested_run
+
    IMPLICIT NONE

    CHARACTER(LEN=6)   ::  message_identifier   !<
+   CHARACTER(LEN=20)  ::  nest_string          !< nest id information
    CHARACTER(LEN=*)   ::  routine_name         !<
    CHARACTER(LEN=200) ::  header_string        !<
…
!
+!-- In case of nested runs create the nest id informations
+   IF ( nested_run )  THEN
+      WRITE( nest_string, '(1X,A,I2.2)' )  'from nest-id ', cpl_id
+   ELSE
+      nest_string = ''
+   ENDIF
+!
!-- Create the complete output string, starting with the message level
    IF ( message_level == 0 )  THEN
-      header_string = '--- informative message --- ID:'
+      header_string = '--- informative message' // TRIM(nest_string) //       &
+                      ' --- ID:'
    ELSEIF ( message_level == 1 )  THEN
-      header_string = '+++ warning message --- ID:'
+      header_string = '+++ warning message' // TRIM(nest_string) // ' --- ID:'
    ELSEIF ( message_level == 2 )  THEN
-      header_string = '+++ error message --- ID:'
+      header_string = '+++ error message' // TRIM(nest_string) // ' --- ID:'
    ELSE
-      WRITE( header_string,'(A,I2)' )  '+++ unknown message level: ',         &
+      WRITE( header_string,'(A,I2)' )  '+++ unknown message level' //         &
+                                       TRIM(nest_string) // ': ',             &
                                        message_level
    ENDIF
…
    information_string_2 = 'http://palm.muk.uni-hannover.de/trac/wiki/doc' //  &
                           '/app/errmsg#' // message_identifier
    ENDIF
…
!
!-- Output on stdout
-   WRITE( *, '( A/)' )  TRIM( header_string )
+   WRITE( *, '(//A/)' )  TRIM( header_string )
!
!-- Cut message string into pieces and output one piece per line.
…
    USE, INTRINSIC ::  ISO_FORTRAN_ENV,                                        &
-       ONLY :  OUTPUT_UNIT
+       ONLY:  OUTPUT_UNIT

    USE pegrid,                                                                &
-       ONLY :  myid
+       ONLY:  myid
+
+   USE pmc_interface,                                                         &
+       ONLY:  cpl_id

    IMPLICIT NONE
…
    LOGICAL ::  advance  !< switch for advancing/noadvancing I/O

+!
+!-- Output for nested runs only on the root domain
+   IF ( cpl_id /= 1 )  RETURN

    IF ( myid == 0 )  THEN
palm/trunk/SOURCE/modules.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! some reformatting
!
! Former revisions:
…
             lunudge = .FALSE., lvnudge = .FALSE., lwnudge = .FALSE.,          &
             masking_method = .FALSE., mg_switch_to_pe0 = .FALSE.,             &
-            monotonic_adjustment = .FALSE.,                                   &
-            nest_bound_l = .FALSE., nest_bound_n = .FALSE.,                   &
-            nest_bound_r = .FALSE., nest_bound_s = .FALSE.,                   &
-            nest_domain = .FALSE.,                                            &
-            neutral = .FALSE., nudging = .FALSE.,                             &
+            monotonic_adjustment = .FALSE.
+   LOGICAL ::  nest_bound_l = .FALSE.  !< nested boundary on left side
+   LOGICAL ::  nest_bound_n = .FALSE.  !< nested boundary on north side
+   LOGICAL ::  nest_bound_r = .FALSE.  !< nested boundary on right side
+   LOGICAL ::  nest_bound_s = .FALSE.  !< nested boundary on south side
+   LOGICAL ::  nest_domain  = .FALSE.  !< domain is nested into a parent domain
+   LOGICAL ::  neutral = .FALSE., nudging = .FALSE.,                          &
             ocean = .FALSE., on_device = .FALSE.,                             &
             outflow_l = .FALSE., outflow_n = .FALSE., outflow_r = .FALSE.,    &
palm/trunk/SOURCE/palm.f90
r1763 → r1764

! Current revisions:
! -----------------
+ ! cpp-statements for nesting removed, communicator settings cleaned up
!
! Former revisions:
…
    USE pegrid

-#if defined( PMC_ACTIVE )
    USE pmc_interface,                                                         &
-       ONLY:  cpl_id, pmci_init, pmci_modelconfiguration
-#endif
+       ONLY:  cpl_id, nested_run, pmci_init, pmci_modelconfiguration

    USE statistics,                                                            &
…
    CALL MPI_INIT( ierr )

-#if defined( PMC_ACTIVE )
!
!-- Initialize the coupling for nested-domain runs
+!-- comm_palm is the communicator which includes all PEs (MPI processes)
+!-- available for this (nested) model. If it is not a nested run, comm_palm
+!-- is returned as MPI_COMM_WORLD
    CALL pmci_init( comm_palm )
    comm2d = comm_palm
-
-   IF ( cpl_id >= 2 )  THEN
-      nest_domain = .TRUE.
-      WRITE( coupling_char, '(A1,I1.1)') '_', cpl_id
-   ENDIF
-
-   CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
-   CALL MPI_COMM_RANK( comm_palm, myid, ierr )
-#else
-   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
-   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
-   comm_palm = MPI_COMM_WORLD
-   comm2d = MPI_COMM_WORLD
-!
-!-- Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm
-!-- will be splitted in init_coupling)
-   CALL init_coupling
-#endif
+!
+!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
+!-- of a further communicator splitting in init_coupling, these numbers will
+!-- be changed in init_pegrid).
+   IF ( nested_run )  THEN
+!--   TO_DO: move the following two settings somewehere to the pmc_interface
+      IF ( cpl_id >= 2 )  THEN
+         nest_domain = .TRUE.
+         WRITE( coupling_char, '(A1,I1.1)') '_', cpl_id
+      ENDIF
+
+      CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
+      CALL MPI_COMM_RANK( comm_palm, myid, ierr )
+
+   ELSE
+
+      CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
+      CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
+!
+!--   Initialize PE topology in case of coupled atmosphere-ocean runs
+!--   (comm_palm will be splitted in init_coupling)
+      CALL init_coupling
+   ENDIF
 #endif
…
    CALL init_3d_model

-#if defined( PMC_ACTIVE )
!
!-- Coupling protocol setup for nested-domain runs
-   CALL pmci_modelconfiguration
-#endif
+   IF ( nested_run )  THEN
+      CALL pmci_modelconfiguration
+   ENDIF

!
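The cleaned-up startup reduces to the pattern sketched below; pmci_init is replaced here by a trivial stand-in that returns MPI_COMM_WORLD, so the sketch compiles without the coupler and is not the PALM routine itself. Whatever communicator the nesting setup hands back becomes the model communicator on which size and rank are queried.

PROGRAM palm_startup_sketch
   USE MPI
   IMPLICIT NONE
   INTEGER ::  comm_palm, comm2d, ierr, myid, numprocs
   LOGICAL ::  nested_run

   CALL MPI_INIT( ierr )

!-- Stand-in for pmci_init: a real nesting setup would MPI_COMM_SPLIT here
   CALL fake_pmci_init( comm_palm, nested_run )
   comm2d = comm_palm

   CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
   CALL MPI_COMM_RANK( comm_palm, myid, ierr )
   IF ( myid == 0 )  PRINT*, 'model communicator has ', numprocs, ' PEs, nested: ', nested_run

   CALL MPI_FINALIZE( ierr )

CONTAINS

   SUBROUTINE fake_pmci_init( comm, nested )
!--   Hypothetical stand-in: no nestpar NAMELIST found means not a nested run
      INTEGER, INTENT(OUT) ::  comm
      LOGICAL, INTENT(OUT) ::  nested
      comm   = MPI_COMM_WORLD
      nested = .FALSE.
   END SUBROUTINE fake_pmci_init

END PROGRAM palm_startup_sketch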
palm/trunk/SOURCE/parin.f90
r1763 → r1764

! Current revisions:
! -----------------
+ ! cpp-statements for nesting removed, explicit settings of boundary conditions
+ ! in nest domains,
+ ! bugfix: npex/npey message moved from inipar to d3par
+ ! bugfix: check of lateral boundary conditions from check_parameters to here,
+ ! because they will be already used in init_pegrid and init_grid
!
! Former revisions:
…
        ONLY:  nx, ny, nz

+   USE kinds
+
    USE model_1d,                                                              &
        ONLY:  damp_level_1d, dt_pr_1d, dt_run_control_1d, end_time_1d

    USE pegrid
+
+   USE pmc_interface,                                                         &
+       ONLY:  nested_run

    USE profil_parameter,                                                      &
…
          READ ( 11, inipar, ERR=10, END=11 )

-#if defined ( PMC_ACTIVE )
-!
-!--      In nested domains, npex or npey must not be given in $inipar
-!--      because here the PE-grids are always defined in the nestpar-NAMELIST
-         IF ( ( npex /= -1 ) .OR. ( npey /= -1 ) )  THEN
-            message_string = 'npex or npey must not be given in $inipar ' //  &
-                             'in nested domains'
-            CALL message( 'parin', 'PAXXXX', 1, 2, 0, 6, 0 )
-         ENDIF
-#else
-!
-!--      Make sure that no nesting boundary conditions are defined if
-!--      PMC_ACTIVE is not defined. Otherwise initiate abort.
-         IF ( ( bc_lr == 'nested' ) .OR. ( bc_ns == 'nested' ) .OR. ( bc_uv_t == 'nested' ) .OR. &
-              ( bc_pt_t == 'nested' ) .OR. ( bc_q_t == 'nested') .OR. ( bc_sa_t == 'nested') .OR. &
-              ( bc_p_t == 'nested' ) )  THEN
-            message_string = 'Nested boundary conditions are not allowed ' // &
-                             'since the cpp flag PMC_ACTIVE is not set'
-            CALL message( 'parin', 'PAXXXX', 1, 2, 0, 6, 0 )
-         ENDIF
-#endif
-
 #if defined ( __check )
!
…
+!
+!--      In case of nested runs, explicitly set nesting boundary conditions
+!--      except for the root domain. This will overwrite the user settings.
+         IF ( nest_domain )  THEN
+            bc_lr   = 'nested'
+            bc_ns   = 'nested'
+            bc_uv_t = 'nested'
+            bc_pt_t = 'nested'
+            bc_p_t  = 'neumann'
+         ENDIF
+!
+!--      Check validity of lateral boundary conditions. This has to be done
+!--      here because they are already used in init_pegrid and init_grid and
+!--      therefore cannot be check in check_parameters
+         IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND. &
+              bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
+            message_string = 'unknown boundary condition: bc_lr = "' //       &
+                             TRIM( bc_lr ) // '"'
+            CALL message( 'check_parameters', 'PA0049', 1, 2, 0, 6, 0 )
+         ENDIF
+         IF ( bc_ns /= 'cyclic'  .AND.  bc_ns /= 'dirichlet/radiation'  .AND. &
+              bc_ns /= 'radiation/dirichlet'  .AND.  bc_ns /= 'nested' )  THEN
+            message_string = 'unknown boundary condition: bc_ns = "' //       &
+                             TRIM( bc_ns ) // '"'
+            CALL message( 'check_parameters', 'PA0050', 1, 2, 0, 6, 0 )
+         ENDIF
+
+!
+!--      Set internal variables used for speed optimization in if clauses
+         IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
+         IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
+         IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.
+         IF ( bc_ns /= 'cyclic' )               bc_ns_cyc    = .FALSE.
+         IF ( bc_ns == 'dirichlet/radiation' )  bc_ns_dirrad = .TRUE.
+         IF ( bc_ns == 'radiation/dirichlet' )  bc_ns_raddir = .TRUE.
+
!
!--      Definition of names of areas used for computing statistics. They must
!--      be defined at this place, because they are allowed to be redefined by
…
!--      values are used for the parameters.
         READ ( 11, d3par, END=20 )
+
+         IF ( nested_run )  THEN
+!
+!--         In nested domains, npex or npey can not be given in the d3par-
+!--         NAMELIST because here the PE-grids are always defined in the
+!--         nestpar-NAMELIST. Settings will be ignored.
+            IF ( ( npex /= -1 ) .OR. ( npey /= -1 ) )  THEN
+               message_string = 'npex or npey can not be given in $d3par ' // &
+                                'for nested runs & they will be ignored'
+               CALL message( 'parin', 'PA0352', 0, 1, 0, 6, 0 )
+            ENDIF
+         ENDIF

!
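The moved validity check and the derived logical switches follow a simple pattern that can be tried in isolation; in the sketch below the message routine is replaced by a plain PRINT and STOP, and the variable names follow the diff while the initial value of bc_lr is arbitrary.

PROGRAM bc_check_sketch
   IMPLICIT NONE
   CHARACTER(LEN=20) ::  bc_lr = 'radiation/dirichlet'
   LOGICAL ::  bc_lr_cyc = .TRUE., bc_lr_dirrad = .FALSE., bc_lr_raddir = .FALSE.
   LOGICAL ::  nest_domain = .FALSE.

!-- In nested (non-root) domains the lateral condition is forced to 'nested'
   IF ( nest_domain )  bc_lr = 'nested'

!-- Validity check, here with a plain STOP instead of PALM's message routine
   IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND.       &
        bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
      PRINT*, 'unknown boundary condition: bc_lr = "', TRIM( bc_lr ), '"'
      STOP
   ENDIF

!-- Internal switches used later in performance-critical IF clauses
   IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
   IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
   IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.

   PRINT*, 'cyclic: ', bc_lr_cyc, '  dirrad: ', bc_lr_dirrad, '  raddir: ', bc_lr_raddir
END PROGRAM bc_check_sketch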
palm/trunk/SOURCE/pmc_client.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! cpp-statement added (nesting can only be used in parallel mode),
+ ! all kinds given in PALM style
!
! Former revisions:
…
+#if defined( __parallel )

    use, intrinsic :: iso_c_binding

-   USE mpi
-   USE kinds, ONLY: wp, iwp
+#if defined( __lc )
+    USE MPI
+#else
+    INCLUDE "mpif.h"
+#endif
+   USE kinds
    USE PMC_general, ONLY: ClientDef, DA_NameDef, DA_Namelen, PMC_STATUS_OK, PMC_DA_NAME_ERR, PeDef, ArrayDef, &
                           DA_Desclen, DA_Namelen, PMC_G_SetName, PMC_G_GetName
…
!   data local to this MODULE
    Type(ClientDef) :: me
-   INTEGER, PARAMETER :: dp = wp
+!-- TO_DO: what is the meaning of this? Could variables declared in this module
+!-- also have single precision?
+!   INTEGER, PARAMETER :: dp = wp

    INTEGER, save :: myIndex = 0   !Counter and unique number for Data Arrays
…
    SUBROUTINE PMC_C_Set_DataArray_2d (array)
       IMPLICIT none
+!--   TO_DO: is double precision absolutely required here?
       REAL(kind=dp),INTENT(IN),DIMENSION(:,:) :: array
!--    local variables
…
    SUBROUTINE PMC_C_Set_DataArray_3d (array)
       IMPLICIT none
+!--   TO_DO: is double precision absolutely required here?
       REAL(kind=dp),INTENT(IN),DIMENSION(:,:,:) :: array
!--    local variables
…
    SUBROUTINE PMC_C_setInd_and_AllocMem
+
       IMPLICIT none

       INTEGER :: i, ierr
       INTEGER :: arlen, myIndex, tag
-      INTEGER(kind=8)       :: bufsize   ! Size of MPI data Window
+      INTEGER(idp)          :: bufsize   ! Size of MPI data Window
       TYPE(PeDef),POINTER   :: aPE
       TYPE(ArrayDef),POINTER :: ar
…
    END SUBROUTINE PMC_C_PutBuffer

-
-! Private SUBROUTINEs
-
+#endif
 END MODULE pmc_client
palm/trunk/SOURCE/pmc_general.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! cpp-statement added (nesting can only be used in parallel mode),
+ ! all kinds given in PALM style
!
! Former revisions:
…
+#if defined( __parallel )
    use, intrinsic :: iso_c_binding
-   USE            :: MPI
+
+   USE kinds
+
+#if defined( __lc )
+    USE MPI
+#else
+    INCLUDE "mpif.h"
+#endif

    IMPLICIT none
…
       INTEGER                       :: dim_order   ! Order of Dimensions: 2 = 2D array, 33 = 3D array
       TYPE (c_ptr)                  :: data        ! Pointer of data in server space
-      INTEGER(kind=8)               :: BufIndex    ! index in Send Buffer
+      INTEGER(idp)                  :: BufIndex    ! index in Send Buffer
       INTEGER                       :: BufSize     ! size in Send Buffer
       TYPE (c_ptr)                  :: SendBuf     ! Pointer of Data in Send buffer
…
    TYPE, PUBLIC :: PeDef
-      INTEGER(KIND=8)                     :: NrEle    ! Number of Elemets
+      INTEGER(idp)                        :: NrEle    ! Number of Elemets
       TYPE (xy_ind), POINTER,DIMENSION(:) :: locInd   ! xy index local array for remote PE
       TYPE( ArrayDef), POINTER            :: Arrays   ! Pointer to Data Array List (Type ArrayDef)
…
    TYPE, PUBLIC :: ClientDef
-      INTEGER(KIND=8)               :: TotalBufferSize
+      INTEGER(idp)                  :: TotalBufferSize
       INTEGER                       :: model_comm   ! Communicator of this model
       INTEGER                       :: inter_comm   ! Inter communicator model and client
…
    END FUNCTION DA_List_next

+#endif
 end MODULE pmc_general
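"All kinds given in PALM style" means using named kind parameters instead of literal kind=8; the self-contained sketch below shows such a kinds module. The names wp, iwp and idp mirror PALM's convention, but the SELECTED_*_KIND arguments are assumptions, not copied from the PALM sources.

MODULE kinds_sketch
!-- Named kind parameters in the PALM style (values assumed, not copied)
   IMPLICIT NONE
   INTEGER, PARAMETER ::  wp  = SELECTED_REAL_KIND( 12, 307 )  ! working precision (double)
   INTEGER, PARAMETER ::  iwp = SELECTED_INT_KIND( 9 )         ! default integer
   INTEGER, PARAMETER ::  idp = SELECTED_INT_KIND( 14 )        ! wide integer, e.g. for buffer indices
END MODULE kinds_sketch

PROGRAM kinds_demo
   USE kinds_sketch
   IMPLICIT NONE
   INTEGER(idp) ::  bufindex      ! instead of INTEGER(kind=8)
   REAL(wp)     ::  lower_left_x  ! instead of REAL(kind=8)

   bufindex     = 123456789012_idp
   lower_left_x = 0.0_wp
   PRINT*, 'kind values: ', wp, iwp, idp, '  sample data: ', bufindex, lower_left_x
END PROGRAM kinds_demo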
palm/trunk/SOURCE/pmc_handle_communicator.f90
r1763 → r1764

 MODULE PMC_handle_communicator
…
! Current revisions:
! ------------------
+ ! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
+ ! the beginning of the variable list,
+ ! domain layout is read with new NAMELIST nestpar from standard file PARIN,
+ ! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
+ ! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
+ ! general format changed to PALM style
!
! Former revisions:
…
! Initial revision by K. Ketelsen
!
-! Intoduction of the pure FORTRAN Palm Model Coupler (PMC)  12.11.2015  K. Ketelsen
-!
! Description:
! ------------
-!
-! Handle MPI Communicator in Palm Model Coupler
+! Handle MPI communicator in PALM model coupler
!------------------------------------------------------------------------------!

-   USE mpi
-   USE pmc_general,                                                           &
-       ONLY: PMC_STATUS_OK, PMC_STATUS_ERROR, PMC_MAX_MODELL
-
-   IMPLICIT none
-
-!  Define Types
-
-   type PMC_layout
-      INTEGER :: comm_parent
-      INTEGER :: comm_cpl
-      INTEGER :: Id
-      INTEGER :: Parent_id
-      INTEGER :: npe_x
-      INTEGER :: npe_y
-      REAL(kind=8) :: lower_left_x
-      REAL(kind=8) :: lower_left_y
-      CHARACTER(len=32) :: name
-   END type PMC_layout
-
-!  return status
-   PUBLIC PMC_STATUS_OK, PMC_STATUS_ERROR
-   INTEGER,parameter,PUBLIC :: PMC_ERROR_NPES = 1  ! illegal Number of PEs
-   INTEGER,parameter,PUBLIC :: PMC_ERROR_MPI  = 2  ! MPI Error
-   INTEGER,parameter,PUBLIC :: PMC_ERRO_NOF   = 3  ! No couple layout file found
+#if defined( __parallel )
+   USE kinds
+
+#if defined( __lc )
+    USE MPI
+#else
+    INCLUDE "mpif.h"
+#endif
+
+   USE pmc_general,                                                           &
+       ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell
+
+   IMPLICIT NONE
+
+   TYPE pmc_layout
+
+      CHARACTER(len=32) ::  name
+
+      INTEGER  ::  id
+      INTEGER  ::  parent_id
+      INTEGER  ::  npe_x
+      INTEGER  ::  npe_y
+
+      REAL(wp) ::  lower_left_x
+      REAL(wp) ::  lower_left_y
+
+   END TYPE pmc_layout
+
+   PUBLIC  pmc_status_ok, pmc_status_error
+
+   INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  ! illegal number of PEs
+   INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  ! error(s) in nestpar namelist
+   INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  ! No couple layout file found

!   Coupler Setup
…
    INTEGER                                    :: m_Parent_id  !Coupler id of parent of this model
    INTEGER                                    :: m_NrOfCpl    !Number of Coupler in layout file
-   type(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all coupler
+   TYPE(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all coupler
…
    INTEGER,DIMENSION(:),POINTER,PUBLIC :: PMC_Server_for_Client

-!INTERFACE Section
-
-   INTERFACE PMC_is_RootModel
-      MODULE PROCEDURE PMC_is_RootModel
-   END INTERFACE PMC_is_RootModel
+   INTERFACE pmc_is_rootmodel
+      MODULE PROCEDURE pmc_is_rootmodel
+   END INTERFACE pmc_is_rootmodel

    INTERFACE PMC_get_local_model_info
       MODULE PROCEDURE PMC_get_local_model_info
    END INTERFACE PMC_get_local_model_info

-   PUBLIC PMC_init_model,PMC_get_local_model_info, PMC_is_RootModel
+   PUBLIC pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

-   SUBROUTINE PMC_init_model (comm, PMC_status)
+ SUBROUTINE pmc_init_model( comm, nesting_mode, pmc_status )
+
+    USE control_parameters,                                                   &
+        ONLY:  message_string
+
+    USE pegrid,                                                               &
+        ONLY:  myid
+
+    IMPLICIT NONE
+
+    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode
+
+    INTEGER, INTENT(OUT)          ::  comm
+    INTEGER, INTENT(OUT)          ::  pmc_status
+
+    INTEGER                             ::  i, ierr, istat
+    INTEGER,DIMENSION(pmc_max_modell+1) ::  start_pe
+    INTEGER                             ::  m_my_cpl_rank
+    INTEGER                             ::  tag, clientcount
+    INTEGER,DIMENSION(pmc_max_modell)   ::  activeserver  ! I am active server for this client ID
+
+    pmc_status   = pmc_status_ok
+    comm         = -1
+    m_my_cpl_id  = -1
+    clientcount  =  0
+    activeserver = -1
+    start_pe(:)  =  0
+
+    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
+    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
+!
+!-- Only PE 0 of root model reads
+    IF ( m_world_rank == 0 )  THEN
+
+       CALL read_coupling_layout( nesting_mode, pmc_status )
+
+       IF ( pmc_status /= pmc_no_namelist_found  .AND.                        &
+            pmc_status /= pmc_namelist_error )                                &
+       THEN
+!
+!--       Calculate start PE of every model
+          start_pe(1) = 0
+          DO  i = 2, m_nrofcpl+1
+             start_pe(i) = start_pe(i-1) +                                    &
+                           m_couplers(i-1)%npe_x * m_couplers(i-1)%npe_y
+          ENDDO
+
+!
+!--       The number of cores provided with the run must be the same as the
+!--       total sum of cores required by all nest domains
+!--       TO_DO: can we use > instead of /= ?
+          IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
+!--          TO_DO: this IF statement is redundant
+             IF ( m_world_rank == 0 )  THEN
+                WRITE ( message_string, '(A,I6,A,I6,A)' )                     &
+                                'nesting-setup requires more MPI procs (',    &
+                                start_pe(m_nrofcpl+1), ') than provided (',   &
+                                m_world_npes,')'
+                CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
+             ENDIF
+          ENDIF
+
+       ENDIF
+
+    ENDIF
+!
+!-- Broadcast the read status. This synchronises all other PEs with PE 0 of
+!-- the root model. Without synchronisation, they would not behave in the
+!-- correct way (e.g. they would not return in case of a missing NAMELIST)
+    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+
+    IF ( pmc_status == pmc_no_namelist_found )  THEN
+!
+!--    Not a nested run; return the MPI_WORLD communicator
+       comm = MPI_COMM_WORLD
+       RETURN
+
+    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
+!
+!--    Only the root model gives the error message. Others are aborted by the
+!--    message-routine with MPI_ABORT. Must be done this way since myid and
+!--    comm2d have not yet been assigned at this point.
+       IF ( m_world_rank == 0 )  THEN
+          message_string = 'errors in $nestpar'
+          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
+       ENDIF
+
+    ENDIF
+
+    CALL MPI_BCAST( m_nrofcpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
+    CALL MPI_BCAST( start_pe, m_nrofcpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
+
+!
+!-- Broadcast coupling layout (name, id, Parent_id, npe_x, npe_y and the
+!-- lower-left coordinates of every model; the coordinates now use MPI_REAL
+-- instead of MPI_REAL8)
+    DO  i = 1, m_nrofcpl
+       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%npe_x,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%npe_y,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
+       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
+    ENDDO
…
(the assignment of PEs to models via MPI_COMM_SPLIT with the couple id as
colour, the creation of the server/client intercommunicators with
MPI_INTERCOMM_CREATE, the counting of clients and the query of the server
model size are reworked into PALM style, with several TO_DO notes added)
…
+!
+!-- Set myid to non-tero value except for the root domain. This is a setting
+!-- for the message routine which is called at the end of pmci_init. That
+!-- routine outputs messages for myid = 0, only. However, myid has not been
+!-- assigened so far, so that all PEs of the root model would output a
+!-- message. To avoid this, set myid to some other value except for PE0 of the
+!-- root domain.
+    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE PMC_init_model
…
-!  Make module private variables available to palm
-
-   SUBROUTINE PMC_get_local_model_info (my_CPL_id, CPL_name, npe_x, npe_y, lower_left_x, lower_left_y)
-      IMPLICIT    none
-      INTEGER,INTENT(OUT),optional          :: my_CPL_id
-      CHARACTER(len=*),INTENT(OUT),optional :: CPL_name
-      INTEGER,INTENT(OUT),optional          :: npe_x
-      INTEGER,INTENT(OUT),optional          :: npe_y
-      REAL(kind=8),INTENT(OUT),optional     :: lower_left_x
-      REAL(kind=8),INTENT(OUT),optional     :: lower_left_y
-
-      if(present(my_CPL_id))    my_CPL_id    = m_my_CPL_id
-      if(present(CPL_name))     CPL_name     = m_couplers(my_CPL_id)%name
-      if(present(npe_x))        npe_x        = m_couplers(my_CPL_id)%npe_x
-      if(present(npe_y))        npe_y        = m_couplers(my_CPL_id)%npe_y
-      if(present(lower_left_x)) lower_left_x = m_couplers(my_CPL_id)%lower_left_x
-      if(present(lower_left_y)) lower_left_y = m_couplers(my_CPL_id)%lower_left_y
-
-      return
-   END SUBROUTINE PMC_get_local_model_info
-
-   LOGICAL function PMC_is_RootModel ()
-      IMPLICIT    none
-      PMC_is_RootModel = (m_my_CPL_id == 1)
-      return
-   END function PMC_is_RootModel
+!
+!-- Make module private variables available to palm
+!-- TO_DO: why can't they be available from the beginning, i.e. why do they
+!--        first have to be declared as different private variables?
+ SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name,  &
+                                      npe_x, npe_y, lower_left_x,             &
+                                      lower_left_y )
+
+    USE kinds
+
+    IMPLICIT NONE
+
+    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name
+    INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_id
+    INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_parent_id
+    INTEGER, INTENT(OUT), OPTIONAL          ::  npe_x
+    INTEGER, INTENT(OUT), OPTIONAL          ::  npe_y
+    REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_x
+    REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_y
+
+!-- TO_DO: is the PRESENT clause really required here?
+    IF ( PRESENT( my_cpl_id )        )  my_cpl_id        = m_my_cpl_id
+    IF ( PRESENT( my_cpl_parent_id ) )  my_cpl_parent_id = m_couplers(my_cpl_id)%parent_id
+    IF ( PRESENT( cpl_name )         )  cpl_name         = m_couplers(my_cpl_id)%name
+    IF ( PRESENT( npe_x )            )  npe_x            = m_couplers(my_cpl_id)%npe_x
+    IF ( PRESENT( npe_y )            )  npe_y            = m_couplers(my_cpl_id)%npe_y
+    IF ( PRESENT( lower_left_x )     )  lower_left_x     = m_couplers(my_cpl_id)%lower_left_x
+    IF ( PRESENT( lower_left_y )     )  lower_left_y     = m_couplers(my_cpl_id)%lower_left_y
+
+ END SUBROUTINE pmc_get_local_model_info
+
+
+ LOGICAL FUNCTION pmc_is_rootmodel( )
+
+    IMPLICIT NONE
+
+    pmc_is_rootmodel = ( m_my_cpl_id == 1 )
+
+ END FUNCTION pmc_is_rootmodel
…
!-- TO_DO: what does this comment mean?
! Private SUBROUTINEs
-   SUBROUTINE read_coupling_layout (PMC_status)
-      IMPLICIT    none
-      INTEGER,INTENT(INOUT)       :: PMC_status
-      INTEGER                     :: i,iunit,istat
-      CHARACTER(LEN=*), PARAMETER :: fname = 'PMC_couple_layout'
-      LOGICAL                     :: lex
-
-      m_NrOfCpl = 0
-      iunit     = 345
-
-      PMC_STATUS = PMC_STATUS_OK
-      INQUIRE(file=TRIM(fname), exist=lex)
-      IF (.NOT. lex) THEN
-         PMC_status = PMC_ERRO_NOF
-         RETURN
-      END IF
-
-      open(iunit,file=TRIM(fname),status='OLD')
-      do i=1,PMC_MAX_MODELL
-         read(iunit,*,iostat=istat) m_couplers(i)%name                        &
-              , m_couplers(i)%id,m_couplers(i)%Parent_id                      &
-              , m_couplers(i)%npe_x,m_couplers(i)%npe_y                       &
-              , m_couplers(i)%lower_left_x, m_couplers(i)%lower_left_y
-         if(istat /= 0)  EXIT
-
-         write(0,'(a,a,1x,4i7,1x,2F10.2)') 'Set up Model  ',trim(m_couplers(i)%name),m_couplers(i)%id,m_couplers(i)%Parent_id, &
-                                           m_couplers(i)%npe_x,m_couplers(i)%npe_y, &
-                                           m_couplers(i)%lower_left_x,m_couplers(i)%lower_left_y
-
-         m_NrOfCpl = i
-      END do
-      close(iunit)
-
-      return
-   END SUBROUTINE read_coupling_layout
-
-END MODULE PMC_handle_communicator
+ SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )
+
+    IMPLICIT NONE
+
+    CHARACTER(LEN=7) ::  nesting_mode
+
+    INTEGER, INTENT(INOUT) ::  pmc_status
+    INTEGER                ::  i, istat, iunit
+
+    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts
+
+
+    NAMELIST /nestpar/  domain_layouts, nesting_mode
+
+!
+!-- Initialize some coupling variables
+    domain_layouts(1:pmc_max_modell)%id = -1
+    m_nrofcpl =   0
+    iunit     = 345
+
+    pmc_status = pmc_status_ok
+
+!
+!-- Open the NAMELIST-file and read the nesting layout
+    CALL check_open( 11 )
+    READ ( 11, nestpar, IOSTAT=istat )
+
+    IF ( istat < 0 )  THEN
+!
+!--    No nestpar-NAMELIST found
+       pmc_status = pmc_no_namelist_found
+!
+!--    Set filepointer to the beginning of the file. Otherwise PE0 will later
+!--    be unable to read the inipar-NAMELIST
+       REWIND ( 11 )
+       RETURN
+
+    ELSEIF ( istat > 0 )  THEN
+!
+!--    Errors in reading nestpar-NAMELIST
+       pmc_status = pmc_namelist_error
+       RETURN
+
+    ENDIF
+
+!
+!-- Output location message
+    CALL location_message( 'initialize communicators for nesting', .FALSE. )
+!
+!-- Assign the layout to the internally used variable
+    m_couplers = domain_layouts
+
+!
+!-- Get the number of nested models given in the nestpar-NAMELIST
+    DO  i = 1, pmc_max_modell
+
+       IF ( m_couplers(i)%id /= -1  .AND.  i <= pmc_max_modell )  THEN
+          WRITE ( 0, '(A,A,1X,4I7,1X,2F10.2)' )  'Set up Model  ',            &
+                             TRIM( m_couplers(i)%name ), m_couplers(i)%id,    &
+                             m_couplers(i)%Parent_id, m_couplers(i)%npe_x,    &
+                             m_couplers(i)%npe_y, m_couplers(i)%lower_left_x, &
+                             m_couplers(i)%lower_left_y
+       ELSE
+!
+!--       When id=-1 is found for the first time, the list of domains is
+!--       finished (or latest after pmc_max_modell entries
+          m_nrofcpl = i - 1
+          EXIT
+       ENDIF
+
+    ENDDO
+
+ END SUBROUTINE read_coupling_layout
+
+#endif
+END MODULE pmc_handle_communicator
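The core of the communicator handling is an MPI_COMM_SPLIT with the couple id as colour, followed by intercommunicators between parent and child groups. The stand-alone sketch below reproduces only the split step; the domain layout is hard-coded instead of being read from a nestpar NAMELIST, the PE counts are assumptions, and no intercommunicators are created.

PROGRAM comm_split_sketch
   USE MPI
   IMPLICIT NONE
   INTEGER ::  ierr, istat, world_rank, world_npes
   INTEGER ::  my_cpl_id, my_cpl_rank, comm, model_rank, model_npes
   INTEGER, DIMENSION(3) ::  start_pe = (/ 0, 4, 8 /)   ! assumed: root on PEs 0-3, one nest on PEs 4-7

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, world_rank, istat )
   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, world_npes, istat )

!-- Assign this PE to a model: colour 1 = root domain, colour 2 = nest
   IF ( world_rank < start_pe(2) )  THEN
      my_cpl_id = 1
   ELSE
      my_cpl_id = 2
   ENDIF
   my_cpl_rank = world_rank - start_pe(my_cpl_id)

!-- One communicator per model, ordered by the rank inside that model
   CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, my_cpl_id, my_cpl_rank, comm, istat )
   CALL MPI_COMM_RANK( comm, model_rank, istat )
   CALL MPI_COMM_SIZE( comm, model_npes, istat )

   PRINT*, 'world rank ', world_rank, ' -> model ', my_cpl_id,                 &
           ' rank ', model_rank, ' of ', model_npes

   CALL MPI_FINALIZE( ierr )
END PROGRAM comm_split_sketch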
palm/trunk/SOURCE/pmc_interface.f90
r1763 → r1764

! Current revisions:
! ------------------
+ ! +cpl_parent_id,
+ ! cpp-statements for nesting replaced by __parallel statements,
+ ! errors output with message-subroutine,
+ ! index bugfixes in pmci_interp_tril_all,
+ ! some adjustments to PALM style
!
! Former revisions:
…
-   USE mpi
-
-!
-!-- PALM modules
-   USE kinds
-   USE pegrid,                                                                &
-       ONLY:  myid, numprocs, comm2d, comm1dx, comm1dy, myidx, myidy, collective_wait
-   USE arrays_3d,                                                             &
-       ONLY:  u, v, w, e, pt, q, u_p, v_p, w_p, e_p, pt_p, q_p, z0, dzu, dzw, zu, zw, &
-              tu_m, tv_m, tw_m, te_m
-   … (the remaining PALM and PMC USE statements in their old order)
+   USE arrays_3d,                                                             &
+       ONLY:  dzu, dzw, e, e_p, pt, pt_p, q, q_p, te_m, tu_m, tv_m, tw_m, u,  &
+              u_p, v, v_p, w, w_p, zu, zw, z0
+
+   USE control_parameters,                                                    &
+       ONLY:  dt_3d, dz, humidity, message_string, nest_bound_l,              &
+              nest_bound_r, nest_bound_s, nest_bound_n, passive_scalar,       &
+              simulated_time, topography, volume_flow
+
+   USE cpulog,                                                                &
+       ONLY:  cpu_log, log_point_s
+
+   USE grid_variables,                                                        &
+       ONLY:  dx, dy
+
+   USE indices,                                                               &
+       ONLY:  nbgp, nx, nxl, nxlg, nxlu, nxr, nxrg, ny, nyn, nyng, nys, nysg, &
+              nysv, nz, nzb, nzb_s_inner, nzb_u_inner, nzb_u_outer,           &
+              nzb_v_inner, nzb_v_outer, nzb_w_inner, nzb_w_outer, nzt
+
+   USE kinds
+
+#if defined( __parallel )
+#if defined( __lc )
+   USE MPI
+#else
+   INCLUDE "mpif.h"
+#endif
+
+   USE pegrid,                                                                &
+       ONLY:  collective_wait, comm1dx, comm1dy, comm2d, myid, myidx, myidy,  &
+              numprocs
+
+   USE pmc_client,                                                            &
+       ONLY:  pmc_clientinit, pmc_c_getnextarray, pmc_c_get_2d_index_list,    &
+              pmc_c_getbuffer, pmc_c_putbuffer, pmc_c_setind_and_allocmem,    &
+              pmc_c_set_dataarray, pmc_set_dataarray_name
+
+   USE pmc_general,                                                           &
+       ONLY:  da_namelen, pmc_max_modell, pmc_status_ok
+
+   USE pmc_handle_communicator,                                               &
+       ONLY:  pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel,     &
+              pmc_no_namelist_found, pmc_server_for_client
+
+   USE pmc_mpi_wrapper,                                                       &
+       ONLY:  pmc_bcast, pmc_recv_from_client, pmc_recv_from_server,          &
+              pmc_send_to_client, pmc_send_to_server
+
+   USE pmc_server,                                                            &
+       ONLY:  pmc_serverinit, pmc_s_fillbuffer, pmc_s_getdata_from_buffer,    &
+              pmc_s_getnextarray, pmc_s_setind_and_allocmem,                  &
+              pmc_s_set_dataarray, pmc_s_set_2d_index_list
+
+#endif

    IMPLICIT NONE

+!-- TO_DO: a lot of lines (including comments) in this file exceed the 80 char
+!-- limit. Try to reduce as much as possible
+
+!-- TO_DO: shouldn't we use public as default here? Only a minority of the
+!-- variables is private.
    PRIVATE    !:  Note that the default publicity is here set to private.

!
!-- Constants
    INTEGER(iwp), PARAMETER, PUBLIC ::  client_to_server = 2   !:
    INTEGER(iwp), PARAMETER, PUBLIC ::  server_to_client = 1   !:

!
!-- Coupler setup
-   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_id        !:
-   CHARACTER(LEN=32), PUBLIC, SAVE ::  cpl_name      !:
-   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npex      !:
-   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npey      !:
+   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_id = 1       !:
+   CHARACTER(LEN=32), PUBLIC, SAVE ::  cpl_name         !:
+   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npex         !:
+   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npey         !:
+   INTEGER(iwp), PUBLIC, SAVE      ::  cpl_parent_id    !:

!
!-- Control parameters, will be made input parameters later
    CHARACTER(LEN=7), PUBLIC, SAVE ::  nesting_mode = 'two-way'   !: steering parameter for one- or two-way nesting
+
+   LOGICAL, PUBLIC, SAVE ::  nested_run = .FALSE.   !: general switch if nested run or not
+
    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_l = -1.0_wp   !:
…
(the interface blocks and the PUBLIC list are re-sorted alphabetically, and
minor declaration layout such as "INTEGER (iwp):: nz" is cleaned up)
…
 SUBROUTINE pmci_init( world_comm )
+
    IMPLICIT NONE

    INTEGER, INTENT(OUT) ::  world_comm   !:

-   INTEGER(iwp)         ::  ierr         !:
-   INTEGER(iwp)         ::  istat        !:
-   INTEGER(iwp)         ::  PMC_status   !:
-
-
-#if defined PMC_ACTIVE
-   CALL pmc_init_model( world_comm, pmc_status )
-   IF ( pmc_status /= pmc_status_ok )  THEN
-      CALL MPI_ABORT( MPI_COMM_WORLD, istat, ierr )
-   ENDIF
-   CALL pmc_get_local_model_info( my_cpl_id = cpl_id, cpl_name = cpl_name, npe_x=cpl_npex, npe_y = cpl_npey, &
-                                  lower_left_x = lower_left_coord_x, lower_left_y = lower_left_coord_y )
-#else
-   world_comm = MPI_COMM_WORLD
-   cpl_id     = 1
-   cpl_name   = ""
-   cpl_npex   = 2
-   cpl_npey   = 2
-   lower_left_coord_x = 0.0_wp
-   lower_left_coord_y = 0.0_wp
-#endif
+#if defined( __parallel )
+
+   INTEGER(iwp)         ::  ierr         !:
+   INTEGER(iwp)         ::  istat        !:
+   INTEGER(iwp)         ::  pmc_status   !:
+
+
+   CALL pmc_init_model( world_comm, nesting_mode, pmc_status )
+
+   IF ( pmc_status == pmc_no_namelist_found )  THEN
+!
+!--   This is not a nested run
+!
+!--   TO_DO: this wouldn't be required any more?
+      world_comm = MPI_COMM_WORLD
+      cpl_id     = 1
+      cpl_name   = ""
+      cpl_npex   = 2
+      cpl_npey   = 2
+      lower_left_coord_x = 0.0_wp
+      lower_left_coord_y = 0.0_wp
+      RETURN
+   ELSE
+!
+!--   Set the general steering switch which tells PALM that its a nested run
+      nested_run = .TRUE.
+   ENDIF
+
+   CALL pmc_get_local_model_info( my_cpl_id = cpl_id,                         &
+                                  my_cpl_parent_id = cpl_parent_id,           &
+                                  cpl_name = cpl_name,                        &
+                                  npe_x = cpl_npex, npe_y = cpl_npey,         &
+                                  lower_left_x = lower_left_coord_x,          &
+                                  lower_left_y = lower_left_coord_y )
+!
+!-- Message that communicators for nesting are initialized.
+!-- Attention: myid has been set at the end of pmc_init_model in order to
+!-- guarantee that only PE0 of the root domain does the output.
+   CALL location_message( 'finished', .TRUE. )
+!
+!-- Reset myid to its default value
+   myid = 0
+#else
+!
+!-- Nesting cannot be used in serial mode. cpl_id is set to root domain (1)
+!-- because no location messages would be generated otherwise.
+!-- world_comm is given a dummy value to avoid compiler warnings (INTENT(OUT)
+!-- should get an explicit value)
+   cpl_id     = 1
+   nested_run = .FALSE.
+   world_comm = 1
+#endif

 END SUBROUTINE pmci_init
…
 SUBROUTINE pmci_modelconfiguration
+
    IMPLICIT NONE

+   CALL location_message( 'setup the nested model configuration', .FALSE. )
    CALL pmci_setup_coordinates   !: Compute absolute coordinates valid for all models
-   CALL pmci_setup_client        !: Initialize PMC Client (Must be called before pmc_palm_SetUp_Server)
+   CALL pmci_setup_client        !: Initialize PMC Client (Must be called before pmc_setup_server)
    CALL pmci_setup_server        !: Initialize PMC Server
+   CALL location_message( 'finished', .TRUE. )

 END SUBROUTINE pmci_modelconfiguration
…
 SUBROUTINE pmci_setup_server

+#if defined( __parallel )
    IMPLICIT NONE
…
-#if defined PMC_ACTIVE
-   CALL pmc_serverinit   ! Initialize PMC Server
-
-!
-!-- Get coordinates from all Clients.
+!
+!   Initialize the PMC server
+   CALL pmc_serverinit
+
+!
+!-- Get coordinates from all clients
    DO  m = 1, SIZE( pmc_server_for_client ) - 1
       client_id = pmc_server_for_client(m)
393 !-- Find the highest client level in the coarse grid for the reduced z transfer 452 !-- Find the highest client level in the coarse grid for the reduced z 453 !-- transfer 394 454 DO k = 1, nz 395 455 IF ( zw(k) > fval(1) ) THEN … … 404 464 ALLOCATE( cl_coord_y(-nbgp:ny_cl+nbgp) ) 405 465 406 CALL pmc_recv_from_client( client_id, cl_coord_x, SIZE( cl_coord_x ), 0, 11, ierr ) 407 CALL pmc_recv_from_client( client_id, cl_coord_y, SIZE( cl_coord_y ), 0, 12, ierr ) 466 CALL pmc_recv_from_client( client_id, cl_coord_x, SIZE( cl_coord_x ),& 467 0, 11, ierr ) 468 CALL pmc_recv_from_client( client_id, cl_coord_y, SIZE( cl_coord_y ),& 469 0, 12, ierr ) 408 470 WRITE ( 0, * ) 'receive from pmc Client ', client_id, nx_cl, ny_cl 409 471 410 472 define_coarse_grid_real(1) = lower_left_coord_x 411 473 define_coarse_grid_real(2) = lower_left_coord_y 412 define_coarse_grid_real(3) = 0 ! KK currently not used. 474 !-- TO_DO: remove this? 475 define_coarse_grid_real(3) = 0 ! KK currently not used. 413 476 define_coarse_grid_real(4) = 0 414 477 define_coarse_grid_real(5) = dx 415 478 define_coarse_grid_real(6) = dy 416 define_coarse_grid_real(7) = lower_left_coord_x + ( nx + 1 ) * dx ! AH: corrected 6.2.2015417 define_coarse_grid_real(8) = lower_left_coord_y + ( ny + 1 ) * dy ! AH: corrected 6.2.2015418 define_coarse_grid_real(9) = dz ! AH: added 24.2.2015479 define_coarse_grid_real(7) = lower_left_coord_x + ( nx + 1 ) * dx 480 define_coarse_grid_real(8) = lower_left_coord_y + ( ny + 1 ) * dy 481 define_coarse_grid_real(9) = dz 419 482 420 483 define_coarse_grid_int(1) = nx … … 437 500 ! 438 501 !-- Send coarse grid information to client 439 CALL pmc_send_to_client( client_id, Define_coarse_grid_real, 9, 0, 21, ierr ) 440 CALL pmc_send_to_client( client_id, Define_coarse_grid_int, 3, 0, 22, ierr ) 441 442 ! 443 !-- Send local grid to client. 502 CALL pmc_send_to_client( client_id, Define_coarse_grid_real, 9, 0, & 503 21, ierr ) 504 CALL pmc_send_to_client( client_id, Define_coarse_grid_int, 3, 0, & 505 22, ierr ) 506 507 ! 508 !-- Send local grid to client 444 509 CALL pmc_send_to_client( client_id, coord_x, nx+1+2*nbgp, 0, 24, ierr ) 445 510 CALL pmc_send_to_client( client_id, coord_y, ny+1+2*nbgp, 0, 25, ierr ) 446 511 447 512 ! 
448 !-- Also send the dzu-, dzw-, zu- and zw-arrays here .513 !-- Also send the dzu-, dzw-, zu- and zw-arrays here 449 514 CALL pmc_send_to_client( client_id, dzu, nz_cl + 1, 0, 26, ierr ) 450 515 CALL pmc_send_to_client( client_id, dzw, nz_cl + 1, 0, 27, ierr ) … … 454 519 ENDIF 455 520 456 CALL MPI_B cast( nomatch, 1, MPI_INTEGER, 0, comm2d, ierr )521 CALL MPI_BCAST( nomatch, 1, MPI_INTEGER, 0, comm2d, ierr ) 457 522 IF ( nomatch /= 0 ) THEN 458 WRITE ( message_string, * ) 'Error: nested client domain does not fit', &459 ' into its server domain'523 WRITE ( message_string, * ) 'Error: nested client domain does ', & 524 'not fit into its server domain' 460 525 CALL message( 'pmc_palm_setup_server', 'PA0XYZ', 1, 2, 0, 6, 0 ) 461 526 ENDIF 462 527 463 CALL MPI_B cast( nz_cl, 1, MPI_INTEGER, 0, comm2d, ierr )528 CALL MPI_BCAST( nz_cl, 1, MPI_INTEGER, 0, comm2d, ierr ) 464 529 465 530 CALL pmci_create_index_list … … 468 533 !-- Include couple arrays into server content 469 534 DO WHILE ( pmc_s_getnextarray( client_id, myname ) ) 470 CALL pmci_set_array_pointer( myName, client_id = client_id, nz_cl = nz_cl ) 535 CALL pmci_set_array_pointer( myname, client_id = client_id, & 536 nz_cl = nz_cl ) 471 537 ENDDO 472 538 CALL pmc_s_setind_and_allocmem( client_id ) 473 539 ENDDO 474 540 475 #endif476 477 478 541 CONTAINS 479 542 480 543 481 544 SUBROUTINE pmci_create_index_list 545 482 546 IMPLICIT NONE 547 483 548 INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE :: coarse_bound_all !: 484 549 INTEGER(iwp) :: i !: … … 504 569 CALL pmc_recv_from_client( client_id, size_of_array, 2, 0, 40, ierr ) 505 570 ALLOCATE( coarse_bound_all(size_of_array(1),size_of_array(2)) ) 506 CALL pmc_recv_from_client( client_id, coarse_bound_all, SIZE( coarse_bound_all ), 0, 41, ierr ) 571 CALL pmc_recv_from_client( client_id, coarse_bound_all, & 572 SIZE( coarse_bound_all ), 0, 41, ierr ) 507 573 508 574 ! … … 519 585 ALLOCATE( index_list(6,ic) ) 520 586 521 CALL MPI_C omm_size( comm1dx, npx, ierr )522 CALL MPI_C omm_size( comm1dy, npy, ierr )587 CALL MPI_COMM_SIZE( comm1dx, npx, ierr ) 588 CALL MPI_COMM_SIZE( comm1dy, npy, ierr ) 523 589 524 590 nrx = nxr - nxl + 1 ! +1 in index because FORTRAN indexing starts with 1, palm with 0 … … 532 598 scoord(1) = px 533 599 scoord(2) = py 534 CALL MPI_C art_rank( comm2d, scoord, server_pe, ierr )600 CALL MPI_CART_RANK( comm2d, scoord, server_pe, ierr ) 535 601 536 602 ic = ic + 1 … … 546 612 CALL pmc_s_set_2d_index_list( client_id, index_list(:,1:ic) ) 547 613 ELSE 548 ALLOCATE( index_list(6,1) ) 614 ALLOCATE( index_list(6,1) ) ! Dummy allocate 549 615 CALL pmc_s_set_2d_index_list( client_id, index_list ) 550 616 ENDIF … … 554 620 END SUBROUTINE pmci_create_index_list 555 621 556 622 #endif 557 623 END SUBROUTINE pmci_setup_server 558 624 … … 560 626 561 627 SUBROUTINE pmci_setup_client 628 629 #if defined( __parallel ) 562 630 IMPLICIT NONE 631 632 CHARACTER(LEN=DA_Namelen) :: myname !: 633 563 634 INTEGER(iwp) :: i !: 564 635 INTEGER(iwp) :: ierr !: … … 579 650 REAL(wp), DIMENSION(4) :: ztt !: 580 651 581 CHARACTER(LEN=DA_Namelen) :: myname !: 582 583 584 #if defined PMC_ACTIVE 585 IF ( .not. pmc_is_rootmodel() ) THEN ! Root Model does not have Server and is not a client 652 653 !-- TO_DO: describe what is happening in this if-clause 654 !-- Root Model does not have Server and is not a client 655 IF ( .NOT. pmc_is_rootmodel() ) THEN 586 656 CALL pmc_clientinit 587 657 … … 596 666 597 667 ! 
598 !-- Update this list appropritely and also in create_client_arrays and in pmci_set_array_pointer. 599 !-- If a variable is removed, it only has tobe removed from here. 600 CALL pmc_set_dataarray_name( lastentry = .true. ) 601 602 ! 603 !-- Send grid to Server 668 !-- Update this list appropritely and also in create_client_arrays and in 669 !-- pmci_set_array_pointer. 670 !-- If a variable is removed, it only has to be removed from here. 671 CALL pmc_set_dataarray_name( lastentry = .TRUE. ) 672 673 ! 674 !-- Send grid to server 604 675 val(1) = nx 605 676 val(2) = ny … … 621 692 622 693 ! 623 !-- Receive also the dz-,zu- and zw-arrays here. 694 !-- Receive also the dz-,zu- and zw-arrays here. 695 !-- TO_DO: what is the meaning of above comment + remove write statements 696 !-- and give this informations in header 624 697 WRITE(0,*) 'Coarse grid from Server ' 625 698 WRITE(0,*) 'startx_tot = ',define_coarse_grid_real(1) … … 635 708 ENDIF 636 709 637 CALL MPI_B cast( define_coarse_grid_real, 9, MPI_REAL, 0, comm2d, ierr )638 CALL MPI_B cast( define_coarse_grid_int,3, MPI_INTEGER, 0, comm2d, ierr )710 CALL MPI_BCAST( define_coarse_grid_real, 9, MPI_REAL, 0, comm2d, ierr ) 711 CALL MPI_BCAST( define_coarse_grid_int, 3, MPI_INTEGER, 0, comm2d, ierr ) 639 712 640 713 cg%dx = define_coarse_grid_real(5) … … 658 731 !-- Get coarse grid coordinates and vales of the z-direction from server 659 732 IF ( myid == 0) THEN 660 CALL pmc_recv_from_server( cg%coord_x, cg%nx + 1 + 2 * nbgp, 0, 24, ierr ) 661 CALL pmc_recv_from_server( cg%coord_y, cg%ny + 1 + 2 * nbgp, 0, 25, ierr ) 733 CALL pmc_recv_from_server( cg%coord_x, cg%nx + 1 + 2 * nbgp, 0, 24, & 734 ierr ) 735 CALL pmc_recv_from_server( cg%coord_y, cg%ny + 1 + 2 * nbgp, 0, 25, & 736 ierr ) 662 737 CALL pmc_recv_from_server( cg%dzu, cg%nz + 1, 0, 26, ierr ) 663 738 CALL pmc_recv_from_server( cg%dzw, cg%nz + 1, 0, 27, ierr ) … … 668 743 ! 669 744 !-- and broadcast this information 670 CALL MPI_Bcast( cg%coord_x, cg%nx + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, ierr ) 671 CALL MPI_Bcast( cg%coord_y, cg%ny + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, ierr ) 672 CALL MPI_Bcast( cg%dzu, cg%nz + 1, MPI_REAL, 0, comm2d, ierr ) 673 CALL MPI_Bcast( cg%dzw, cg%nz + 1, MPI_REAL, 0, comm2d, ierr ) 674 CALL MPI_Bcast( cg%zu, cg%nz + 2, MPI_REAL, 0, comm2d, ierr ) 675 CALL MPI_Bcast( cg%zw, cg%nz + 2, MPI_REAL, 0, comm2d, ierr ) 745 CALL MPI_BCAST( cg%coord_x, cg%nx + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, & 746 ierr ) 747 CALL MPI_BCAST( cg%coord_y, cg%ny + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, & 748 ierr ) 749 CALL MPI_BCAST( cg%dzu, cg%nz + 1, MPI_REAL, 0, comm2d, ierr ) 750 CALL MPI_BCAST( cg%dzw, cg%nz + 1, MPI_REAL, 0, comm2d, ierr ) 751 CALL MPI_BCAST( cg%zu, cg%nz + 2, MPI_REAL, 0, comm2d, ierr ) 752 CALL MPI_BCAST( cg%zw, cg%nz + 2, MPI_REAL, 0, comm2d, ierr ) 676 753 677 754 CALL pmci_map_fine_to_coarse_grid 678 679 755 CALL pmc_c_get_2d_index_list 680 756 … … 682 758 !-- Include couple arrays into client content. 683 759 DO WHILE ( pmc_c_getnextarray( myname ) ) 684 CALL pmci_create_client_arrays ( myName, icl, icr, jcs, jcn, cg%nz ) ! Klaus, why the c-arrays are still up to cg%nz?? 760 !-- TO_DO: Klaus, why the c-arrays are still up to cg%nz?? 761 CALL pmci_create_client_arrays ( myname, icl, icr, jcs, jcn, cg%nz ) 685 762 ENDDO 686 763 CALL pmc_c_setind_and_allocmem 687 764 688 765 ! 
689 !-- Precompute interpolation coefficients and client-array indices .766 !-- Precompute interpolation coefficients and client-array indices 690 767 CALL pmci_init_interp_tril 691 768 … … 695 772 696 773 ! 697 !-- Define the SGS-TKE scaling factor based on the grid-spacing ratio .774 !-- Define the SGS-TKE scaling factor based on the grid-spacing ratio 698 775 CALL pmci_init_tkefactor 699 776 700 777 ! 701 778 !-- Two-way coupling 702 IF ( nesting_mode == 'two-way' ) THEN779 IF ( nesting_mode == 'two-way' ) THEN 703 780 CALL pmci_init_anterp_tophat 704 781 ENDIF … … 712 789 713 790 ! 714 !-- Why not just simply? test this!791 !-- TO_DO: Why not just simply? test this! 715 792 !area_t_l = ( nx + 1 ) * (ny + 1 ) * dx * dy 716 793 717 ENDIF ! IF ( .not. PMC_is_RootModel ) 718 #endif 719 794 ENDIF 720 795 721 796 CONTAINS … … 723 798 724 799 SUBROUTINE pmci_map_fine_to_coarse_grid 800 725 801 IMPLICIT NONE 802 726 803 INTEGER(iwp), DIMENSION(5,numprocs) :: coarse_bound_all !: 727 804 INTEGER(iwp), DIMENSION(2) :: size_of_array !: … … 730 807 REAL(wp) :: coarse_dy !: 731 808 REAL(wp) :: loffset !: 809 REAL(wp) :: noffset !: 732 810 REAL(wp) :: roffset !: 733 REAL(wp) :: noffset !:734 811 REAL(wp) :: soffset !: 735 812 … … 792 869 ! 793 870 !-- Note that MPI_Gather receives data from all processes in the rank order. 794 CALL MPI_G ather( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, &871 CALL MPI_GATHER( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, & 795 872 MPI_INTEGER, 0, comm2d, ierr ) 796 873 … … 968 1045 DO i = nxl - 1, nxl 969 1046 DO j = nys, nyn 970 nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) ) 1047 nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i), & 1048 nzb_v_inner(j,i), nzb_w_inner(j,i) ) 971 1049 ENDDO 972 1050 ENDDO … … 978 1056 i = nxr + 1 979 1057 DO j = nys, nyn 980 nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) ) 1058 nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i), & 1059 nzb_v_inner(j,i), nzb_w_inner(j,i) ) 981 1060 ENDDO 982 1061 nzt_topo_nestbc_r = nzt_topo_nestbc_r + 1 … … 987 1066 DO j = nys - 1, nys 988 1067 DO i = nxl, nxr 989 nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) ) 1068 nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i), & 1069 nzb_v_inner(j,i), nzb_w_inner(j,i) ) 990 1070 ENDDO 991 1071 ENDDO … … 997 1077 j = nyn + 1 998 1078 DO i = nxl, nxr 999 nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) ) 1079 nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i), & 1080 nzb_v_inner(j,i), nzb_w_inner(j,i) ) 1000 1081 ENDDO 1001 1082 nzt_topo_nestbc_n = nzt_topo_nestbc_n + 1 … … 1003 1084 1004 1085 ! 1005 !-- Then determine the maximum number of near-wall nodes per wall point based on the grid-spacing ratios. 1006 nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r, nzt_topo_nestbc_s, nzt_topo_nestbc_n ) 1007 ni = CEILING( cg%dx / dx ) / 2 ! Note that the outer division must be integer division. 1008 nj = CEILING( cg%dy / dy ) / 2 ! Note that the outer division must be integer division. 1086 !-- Then determine the maximum number of near-wall nodes per wall point based 1087 !-- on the grid-spacing ratios. 1088 nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r, & 1089 nzt_topo_nestbc_s, nzt_topo_nestbc_n ) 1090 1091 ! 1092 !-- Note that the outer division must be integer division. 
1093 ni = CEILING( cg%dx / dx ) / 2 1094 nj = CEILING( cg%dy / dy ) / 2 1009 1095 nk = 1 1010 1096 DO k = 1, nzt_topo_max … … 1018 1104 1019 1105 ! 1020 !-- First horizontal walls .1021 !-- Left boundary .1106 !-- First horizontal walls 1107 !-- Left boundary 1022 1108 IF ( nest_bound_l ) THEN 1023 1109 ALLOCATE( logc_u_l(nzb:nzt_topo_nestbc_l, nys:nyn, 1:2) ) … … 1033 1119 1034 1120 DO j = nys, nyn 1035 1036 ! 1037 !-- Left boundary for u. 1121 ! 1122 !-- Left boundary for u 1038 1123 i = 0 1039 1124 kb = nzb_u_inner(j,i) 1040 1125 k = kb + 1 1041 1126 wall_index = kb 1042 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1127 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1128 wall_index, z0(j,i), kb, direction, ncorr ) 1043 1129 logc_u_l(k,j,1) = lc 1044 1130 logc_ratio_u_l(k,j,1,0:ncorr-1) = lcr(0:ncorr-1) … … 1046 1132 1047 1133 ! 1048 !-- Left boundary for v .1134 !-- Left boundary for v 1049 1135 i = -1 1050 1136 kb = nzb_v_inner(j,i) 1051 1137 k = kb + 1 1052 1138 wall_index = kb 1053 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1139 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1140 wall_index, z0(j,i), kb, direction, ncorr ) 1054 1141 logc_v_l(k,j,1) = lc 1055 1142 logc_ratio_v_l(k,j,1,0:ncorr-1) = lcr(0:ncorr-1) 1056 1143 lcr(0:ncorr-1) = 1.0_wp 1057 ENDDO 1058 ENDIF 1059 1060 ! 1061 !-- Right boundary. 1144 1145 ENDDO 1146 ENDIF 1147 1148 ! 1149 !-- Right boundary 1062 1150 IF ( nest_bound_r ) THEN 1063 1151 ALLOCATE( logc_u_r(nzb:nzt_topo_nestbc_r,nys:nyn,1:2) ) … … 1071 1159 direction = 1 1072 1160 inc = 1 1073 DO j = nys, nyn 1074 1161 DO j = nys, nyn 1075 1162 ! 1076 1163 !-- Right boundary for u. … … 1079 1166 k = kb + 1 1080 1167 wall_index = kb 1081 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1168 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1169 wall_index, z0(j,i), kb, direction, ncorr ) 1082 1170 logc_u_r(k,j,1) = lc 1083 1171 logc_ratio_u_r(k,j,1,0:ncorr-1) = lcr(0:ncorr-1) … … 1090 1178 k = kb + 1 1091 1179 wall_index = kb 1092 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1180 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1181 wall_index, z0(j,i), kb, direction, ncorr ) 1093 1182 logc_v_r(k,j,1) = lc 1094 1183 logc_ratio_v_r(k,j,1,0:ncorr-1) = lcr(0:ncorr-1) 1095 1184 lcr(0:ncorr-1) = 1.0_wp 1096 ENDDO 1097 ENDIF 1098 1099 ! 1100 !-- South boundary. 1185 1186 ENDDO 1187 ENDIF 1188 1189 ! 1190 !-- South boundary 1101 1191 IF ( nest_bound_s ) THEN 1102 1192 ALLOCATE( logc_u_s(nzb:nzt_topo_nestbc_s,nxl:nxr,1:2) ) … … 1111 1201 inc = 1 1112 1202 DO i = nxl, nxr 1113 1114 1203 ! 1115 1204 !-- South boundary for u. … … 1118 1207 k = kb + 1 1119 1208 wall_index = kb 1120 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1209 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1210 wall_index, z0(j,i), kb, direction, ncorr ) 1121 1211 logc_u_s(k,i,1) = lc 1122 1212 logc_ratio_u_s(k,i,1,0:ncorr-1) = lcr(0:ncorr-1) … … 1124 1214 1125 1215 ! 
1126 !-- South boundary for v .1216 !-- South boundary for v 1127 1217 j = 0 1128 1218 kb = nzb_v_inner(j,i) 1129 1219 k = kb + 1 1130 1220 wall_index = kb 1131 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1221 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1222 wall_index, z0(j,i), kb, direction, ncorr ) 1132 1223 logc_v_s(k,i,1) = lc 1133 1224 logc_ratio_v_s(k,i,1,0:ncorr-1) = lcr(0:ncorr-1) … … 1137 1228 1138 1229 ! 1139 !-- North boundary .1230 !-- North boundary 1140 1231 IF ( nest_bound_n ) THEN 1141 1232 ALLOCATE( logc_u_n(nzb:nzt_topo_nestbc_n,nxl:nxr,1:2) ) … … 1149 1240 direction = 1 1150 1241 inc = 1 1151 DO i = nxl, nxr 1152 1242 DO i = nxl, nxr 1153 1243 ! 1154 1244 !-- North boundary for u. … … 1157 1247 k = kb + 1 1158 1248 wall_index = kb 1159 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1249 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1250 wall_index, z0(j,i), kb, direction, ncorr ) 1160 1251 logc_u_n(k,i,1) = lc 1161 1252 logc_ratio_u_n(k,i,1,0:ncorr-1) = lcr(0:ncorr-1) … … 1168 1259 k = kb + 1 1169 1260 wall_index = kb 1170 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr ) 1261 CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, & 1262 wall_index, z0(j,i), kb, direction, ncorr ) 1171 1263 logc_v_n(k,i,1) = lc 1172 1264 logc_ratio_v_n(k,i,1,0:ncorr-1) = lcr(0:ncorr-1) 1173 1265 lcr(0:ncorr-1) = 1.0_wp 1266 1174 1267 ENDDO 1175 1268 ENDIF … … 2011 2104 END SUBROUTINE pmci_init_tkefactor 2012 2105 2013 2106 #endif 2014 2107 END SUBROUTINE pmci_setup_client 2015 2108 … … 2017 2110 2018 2111 SUBROUTINE pmci_setup_coordinates 2112 2113 #if defined( __parallel ) 2019 2114 IMPLICIT NONE 2115 2020 2116 INTEGER(iwp) :: i !: 2021 2117 INTEGER(iwp) :: j !: … … 2033 2129 coord_y(j) = lower_left_coord_y + j * dy 2034 2130 ENDDO 2035 2131 2132 #endif 2036 2133 END SUBROUTINE pmci_setup_coordinates 2037 2134 … … 2040 2137 SUBROUTINE pmci_server_synchronize 2041 2138 2139 #if defined( __parallel ) 2042 2140 ! 2043 2141 !-- Unify the time steps for each model and synchronize. … … 2066 2164 ! 2067 2165 !-- Broadcast the unified time step to all server processes. 2068 CALL MPI_B cast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )2166 CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr ) 2069 2167 2070 2168 ! … … 2076 2174 ENDIF 2077 2175 ENDDO 2078 2176 2177 #endif 2079 2178 END SUBROUTINE pmci_server_synchronize 2080 2179 … … 2083 2182 SUBROUTINE pmci_client_synchronize 2084 2183 2184 #if defined( __parallel ) 2085 2185 ! 2086 2186 !-- Unify the time steps for each model and synchronize. … … 2105 2205 ! 2106 2206 !-- Broadcast the unified time step to all server processes. 2107 CALL MPI_B cast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )2207 CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr ) 2108 2208 ENDIF 2109 2209 2210 #endif 2110 2211 END SUBROUTINE pmci_client_synchronize 2111 2212 … … 2113 2214 2114 2215 SUBROUTINE pmci_server_datatrans( direction ) 2216 2115 2217 IMPLICIT NONE 2218 2116 2219 INTEGER(iwp),INTENT(IN) :: direction !: 2220 2221 #if defined( __parallel ) 2117 2222 INTEGER(iwp) :: client_id !: 2118 2223 INTEGER(iwp) :: i !: … … 2138 2243 ! 2139 2244 !-- Broadcast the unified time step to all server processes. 
2140 CALL MPI_B cast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )2245 CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr ) 2141 2246 2142 2247 DO m = 1, SIZE( PMC_Server_for_Client ) - 1 … … 2185 2290 ENDDO 2186 2291 2292 #endif 2187 2293 END SUBROUTINE pmci_server_datatrans 2188 2294 … … 2190 2296 2191 2297 SUBROUTINE pmci_client_datatrans( direction ) 2298 2192 2299 IMPLICIT NONE 2300 2193 2301 INTEGER(iwp), INTENT(IN) :: direction !: 2302 2303 #if defined( __parallel ) 2194 2304 INTEGER(iwp) :: ierr !: 2195 2305 INTEGER(iwp) :: icl !: … … 2214 2324 ! 2215 2325 !-- Broadcast the unified time step to all server processes. 2216 CALL MPI_B cast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )2326 CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr ) 2217 2327 CALL cpu_log( log_point_s(70), 'PMC model sync', 'stop' ) 2218 2328 … … 3179 3289 !-- Spatial under-relaxation. 3180 3290 fra = frax(l) * fray(m) * fraz(n) 3291 !-- TO_DO: why not KIND=wp ? 3181 3292 fc(n,m,l) = ( 1.0_wp - fra ) * fc(n,m,l) + fra * cellsum / REAL( nfc, KIND=KIND(cellsum) ) 3182 3293 ENDDO … … 3186 3297 END SUBROUTINE pmci_anterp_tophat 3187 3298 3188 3299 #endif 3189 3300 END SUBROUTINE pmci_client_datatrans 3190 3301 … … 3193 3304 SUBROUTINE pmci_update_new 3194 3305 3306 #if defined( __parallel ) 3195 3307 ! 3196 3308 !-- Copy the interpolated/anterpolated boundary values to the _p … … 3219 3331 3220 3332 ! 3221 !-- Find out later if nesting would work without __nopointer. 3333 !-- TO_DO: Find out later if nesting would work without __nopointer. 3334 #endif 3222 3335 3223 3336 END SUBROUTINE pmci_update_new … … 3226 3339 3227 3340 SUBROUTINE pmci_set_array_pointer( name, client_id, nz_cl ) 3341 3228 3342 IMPLICIT NONE 3229 3343 … … 3232 3346 CHARACTER(LEN=*), INTENT(IN) :: name !: 3233 3347 3348 #if defined( __parallel ) 3234 3349 REAL(wp), POINTER, DIMENSION(:,:) :: p_2d !: 3235 3350 REAL(wp), POINTER, DIMENSION(:,:,:) :: p_3d !: … … 3238 3353 3239 3354 3240 #if defined PMC_ACTIVE3241 3355 NULLIFY( p_3d ) 3242 3356 NULLIFY( p_2d ) … … 3257 3371 CALL pmc_s_set_dataarray( client_id, p_2d ) 3258 3372 ELSE 3259 IF ( myid == 0 ) WRITE( 0, * ) 'PMC set_array_Pointer -> no pointer p_2d or p_3d associated ' 3260 CALL MPI_Abort( MPI_COMM_WORLD, istat, ierr ) 3373 ! 3374 !-- Give only one message for the root domain 3375 IF ( myid == 0 .AND. cpl_id == 1 ) THEN 3376 3377 message_string = 'pointer for array "' // TRIM( name ) // & 3378 '" can''t be associated' 3379 CALL message( 'pmci_set_array_pointer', 'PA0117', 3, 2, 0, 6, 0 ) 3380 ELSE 3381 ! 3382 !-- Avoid others to continue 3383 CALL MPI_BARRIER( comm2d, ierr ) 3384 ENDIF 3261 3385 ENDIF 3262 3386 3263 3387 #endif 3264 3265 3388 END SUBROUTINE pmci_set_array_pointer 3266 3389 … … 3268 3391 3269 3392 SUBROUTINE pmci_create_client_arrays( name, is, ie, js, je, nzc ) 3393 3270 3394 IMPLICIT NONE 3395 3271 3396 INTEGER(iwp), INTENT(IN) :: ie !: 3272 3397 INTEGER(iwp), INTENT(IN) :: is !: … … 3276 3401 CHARACTER(LEN=*), INTENT(IN) :: name !: 3277 3402 3403 #if defined( __parallel ) 3278 3404 REAL(wp), POINTER,DIMENSION(:,:) :: p_2d !: 3279 3405 REAL(wp), POINTER,DIMENSION(:,:,:) :: p_3d !: … … 3282 3408 3283 3409 3284 #if defined PMC_ACTIVE3285 3410 NULLIFY( p_3d ) 3286 3411 NULLIFY( p_2d ) … … 3317 3442 CALL pmc_c_set_dataarray( p_2d ) 3318 3443 ELSE 3319 IF ( myid == 0 ) WRITE( 0 , * ) 'PMC create_client_arrays -> no pointer p_2d or p_3d associated ' 3320 CALL MPI_Abort( MPI_COMM_WORLD, istat, ierr ) 3444 ! 
3445 !-- Give only one message for the first client domain 3446 IF ( myid == 0 .AND. cpl_id == 2 ) THEN 3447 3448 message_string = 'pointer for array "' // TRIM( name ) // & 3449 '" can''t be associated' 3450 CALL message( 'pmci_create_client_arrays', 'PA0170', 3, 2, 0, 6, 0 ) 3451 ELSE 3452 ! 3453 !-- Avoid others to continue 3454 CALL MPI_BARRIER( comm2d, ierr ) 3455 ENDIF 3321 3456 ENDIF 3457 3322 3458 #endif 3323 3324 3459 END SUBROUTINE pmci_create_client_arrays 3325 3460 … … 3327 3462 3328 3463 SUBROUTINE pmci_server_initialize 3464 3465 #if defined( __parallel ) 3329 3466 IMPLICIT NONE 3467 3330 3468 INTEGER(iwp) :: client_id !: 3331 3469 INTEGER(iwp) :: m !: … … 3338 3476 ENDDO 3339 3477 3478 #endif 3340 3479 END SUBROUTINE pmci_server_initialize 3341 3480 … … 3344 3483 SUBROUTINE pmci_client_initialize 3345 3484 3485 #if defined( __parallel ) 3346 3486 IMPLICIT NONE 3487 3347 3488 INTEGER(iwp) :: i !: 3348 3489 INTEGER(iwp) :: icl !: … … 3428 3569 INTEGER(iwp) :: i !: 3429 3570 INTEGER(iwp) :: ib !: 3571 INTEGER(iwp) :: ie !: 3430 3572 INTEGER(iwp) :: j !: 3431 3573 INTEGER(iwp) :: jb !: 3574 INTEGER(iwp) :: je !: 3432 3575 INTEGER(iwp) :: k !: 3433 3576 INTEGER(iwp) :: k1 !: … … 3448 3591 3449 3592 ib = nxl 3450 jb = nys 3593 ie = nxr 3594 jb = nys 3595 je = nyn 3451 3596 IF ( nest_bound_l ) THEN 3597 ib = nxl - 1 3452 3598 IF ( var == 'u' ) THEN ! For u, nxl is a ghost node, but not for the other variables. 3453 ib = nxl + 13599 ib = nxl 3454 3600 ENDIF 3455 3601 ENDIF 3456 3602 IF ( nest_bound_s ) THEN 3603 jb = nys - 1 3457 3604 IF ( var == 'v' ) THEN ! For v, nys is a ghost node, but not for the other variables. 3458 jb = nys + 13605 jb = nys 3459 3606 ENDIF 3460 3607 ENDIF 3608 IF ( nest_bound_r ) THEN 3609 ie = nxr + 1 3610 ENDIF 3611 IF ( nest_bound_n ) THEN 3612 je = nyn + 1 3613 ENDIF 3461 3614 3462 3615 ! 3463 3616 !-- Trilinear interpolation. 3464 DO i = ib, nxr3465 DO j = jb, nyn3466 DO k = kb(j,i), nzt 3617 DO i = ib, ie 3618 DO j = jb, je 3619 DO k = kb(j,i), nzt + 1 3467 3620 l = ic(i) 3468 3621 m = jc(j) … … 3517 3670 END SUBROUTINE pmci_interp_tril_all 3518 3671 3672 #endif 3519 3673 END SUBROUTINE pmci_client_initialize 3520 3674 … … 3523 3677 SUBROUTINE pmci_ensure_nest_mass_conservation 3524 3678 3679 #if defined( __parallel ) 3525 3680 ! 3526 3681 !-- Adjust the volume-flow rate through the top boundary … … 3635 3790 ENDDO 3636 3791 3792 #endif 3637 3793 END SUBROUTINE pmci_ensure_nest_mass_conservation 3638 3794 -
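The pmc_interface changes above all follow the same pattern: the former PMC_ACTIVE switch is gone, every routine body is wrapped in __parallel preprocessor guards, and a serial build therefore compiles empty stubs while the new nested_run switch simply keeps its default value .FALSE. A cut-down sketch of that pattern, reusing the module, variable and guard names from the diff but with everything else stripped away (so it is illustrative only, not the real module), could look like this:

 MODULE pmc_interface_sketch

#if defined( __parallel )
    USE MPI   ! on non-__lc hosts the diff falls back to INCLUDE "mpif.h" instead
#endif

    IMPLICIT NONE

    INTEGER, PUBLIC, SAVE ::  cpl_id = 1            !: root domain by default, as in the diff
    LOGICAL, PUBLIC, SAVE ::  nested_run = .FALSE.  !: general switch if nested run or not

 CONTAINS

    SUBROUTINE pmci_init_sketch( world_comm )

       INTEGER, INTENT(OUT) ::  world_comm

#if defined( __parallel )
!
!--    Parallel build: in the real code pmci_init detects a nested run via
!--    pmc_init_model and the nesting namelist; here this is only mimicked
       world_comm = MPI_COMM_WORLD
       nested_run = .TRUE.
#else
!
!--    Serial build: nesting is not available; give the INTENT(OUT) argument
!--    an explicit dummy value to avoid compiler warnings, as the diff does
       world_comm = 1
       nested_run = .FALSE.
#endif

    END SUBROUTINE pmci_init_sketch

 END MODULE pmc_interface_sketch

The same #if defined( __parallel ) / #endif pair encloses pmci_setup_server, pmci_setup_client and the data-transfer routines in the diff, which is why each of them now ends in #endif directly before its END SUBROUTINE.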
palm/trunk/SOURCE/pmc_mpi_wrapper.f90
r1763 r1764 20 20 ! Current revisions: 21 21 ! ------------------ 22 ! 22 ! cpp-statement added (nesting can only be used in parallel mode), 23 ! kind-parameters adjusted to PALM-kinds 23 24 ! 24 25 ! Former revisions: … … 35 36 !------------------------------------------------------------------------------! 36 37 38 #if defined( __parallel ) 37 39 use, intrinsic :: iso_c_binding 38 40 39 USE mpi 40 USE kinds, ONLY: wp 41 #if defined( __lc ) 42 USE MPI 43 #else 44 INCLUDE "mpif.h" 45 #endif 46 USE kinds 41 47 USE PMC_handle_communicator, ONLY: m_to_server_comm, m_to_client_comm, m_model_comm, m_model_rank 42 48 IMPLICIT none … … 44 50 SAVE 45 51 46 INTEGER, PARAMETER :: dp = wp 52 !-- TO_DO: what is the meaning of this? Could variables declared in this module 53 !-- also have single precision? 54 ! INTEGER, PARAMETER :: dp = wp 47 55 48 56 … … 149 157 SUBROUTINE PMC_Send_to_Server_real_r1 (buf, n, Server_rank, tag, ierr) 150 158 IMPLICIT none 159 !-- TO_DO: has buf always to be of dp-kind, or can wp used here 160 !-- this effects all respective declarations in this file 151 161 REAL(kind=dp), DIMENSION(:), INTENT(IN) :: buf 152 162 INTEGER, INTENT(IN) :: n … … 485 495 IMPLICIT none 486 496 REAL(kind=wp),DIMENSION(:),POINTER,INTENT(INOUT) :: array 487 INTEGER( kind=8),INTENT(IN):: idim1497 INTEGER(idp),INTENT(IN) :: idim1 488 498 Type(c_ptr),INTENT(OUT),optional :: base_ptr 489 499 … … 516 526 END FUNCTION PMC_TIME 517 527 528 #endif 518 529 END MODULE pmc_mpi_wrapper -
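Besides the __parallel guard, the wrapper above replaces hard-wired kinds such as INTEGER(kind=8) by the named PALM kinds from the kinds module, and leaves open (see the TO_DO comment) whether the module-local dp parameter can simply become wp. A small, purely hypothetical helper written in that post-changeset style (the routine name and arguments are invented for illustration; only the kind usage follows the diff) might read:

 SUBROUTINE sketch_alloc_window( idim1, array )

!
!-- Illustration of PALM-kind declarations after r1764: idp for 64-bit counts
!-- (formerly INTEGER(kind=8)), iwp for default integers, wp for reals
    USE kinds,  ONLY:  idp, iwp, wp

    IMPLICIT NONE

    INTEGER(idp), INTENT(IN)                        ::  idim1  !: number of elements
    REAL(wp), DIMENSION(:), POINTER, INTENT(INOUT)  ::  array  !: data window

    INTEGER(iwp) ::  istat  !: allocation status

    ALLOCATE( array(idim1), STAT=istat )
    IF ( istat /= 0 )  NULLIFY( array )

 END SUBROUTINE sketch_alloc_window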
palm/trunk/SOURCE/pmc_server.f90
r1763 r1764 20 20 ! Current revisions: 21 21 ! ------------------ 22 ! 22 ! cpp-statement added (nesting can only be used in parallel mode) 23 23 ! 24 24 ! Former revisions: … … 35 35 !------------------------------------------------------------------------------! 36 36 37 #if defined( __parallel ) 37 38 use, intrinsic :: iso_c_binding 38 39 39 USE mpi 40 USE kinds, ONLY: wp, iwp 40 #if defined( __lc ) 41 USE MPI 42 #else 43 INCLUDE "mpif.h" 44 #endif 45 USE kinds 41 46 USE PMC_general, ONLY: ClientDef, PMC_MAX_MODELL,PMC_sort, DA_NameDef, DA_Desclen, DA_Namelen, & 42 47 PMC_G_SetName, PMC_G_GetName, PeDef, ArrayDef … … 60 65 PUBLIC PMC_Server_for_Client 61 66 62 INTEGER, PARAMETER :: dp = wp 67 !-- TO_DO: what is the meaning of this? Could variables declared in this module 68 !-- also have single precision? 69 ! INTEGER, PARAMETER :: dp = wp 63 70 64 71 ! INTERFACE section … … 225 232 IMPLICIT none 226 233 INTEGER,INTENT(IN) :: ClientId 234 !-- TO_DO: has array always to be of dp-kind, or can wp used here 235 !-- this effects all respective declarations in this file 227 236 REAL(kind=dp),INTENT(IN),DIMENSION(:,:) :: array 228 237 !-- local variables … … 282 291 INTEGER :: arlen, myIndex, tag 283 292 INTEGER :: rCount ! count MPI requests 284 INTEGER( kind=8):: bufsize ! Size of MPI data Window293 INTEGER(idp) :: bufsize ! Size of MPI data Window 285 294 TYPE(PeDef),POINTER :: aPE 286 295 TYPE(ArrayDef),POINTER :: ar … … 347 356 do while (PMC_S_GetNextArray ( ClientId, myName,i)) 348 357 ar => aPE%Arrays 358 !-- TO_DO: Adressrechnung ueberlegen? 349 359 ar%SendBuf = c_loc(base_array(ar%BufIndex)) !kk Adressrechnung ueberlegen 350 360 if(ar%BufIndex+ar%BufSize > bufsize) then 361 !-- TO_DO: can this error really happen, and what can be the reason? 351 362 write(0,'(a,i4,4i7,1x,a)') 'Buffer too small ',i,ar%BufIndex,ar%BufSize,ar%BufIndex+ar%BufSize,bufsize,trim(myName) 352 363 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr) … … 402 413 end do 403 414 else 415 !-- TO_DO: can this error really happen, and what can be the reason? 404 416 write(0,*) "Illegal Order of Dimension ",ar%dim_order 405 417 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr); … … 458 470 end do 459 471 else 472 !-- TO_DO: can this error really happen, and what can be the reason? 460 473 write(0,*) "Illegal Order of Dimension ",ar%dim_order 461 474 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr); … … 624 637 END SUBROUTINE Set_PE_index_list 625 638 639 #endif 626 640 END MODULE pmc_server -
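pmc_server still keeps several raw WRITE(0,*) / MPI_Abort exits that the changeset only flags with TO_DO comments, while pmc_interface already routes its errors through the central message routine (see pmci_set_array_pointer above). A hypothetical helper, not part of r1764, that would bring the buffer-overflow check here into the same pattern could look as follows; the error id 'PA0XYZ' is kept as a placeholder exactly as the diff itself does:

 SUBROUTINE sketch_report_buffer_overflow( array_name, required, available )

!
!-- Hypothetical only: funnels the "Buffer too small" exit through the PALM
!-- message routine instead of WRITE(0,*) + MPI_Abort
    USE control_parameters,  ONLY:  message_string
    USE kinds,               ONLY:  idp

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN) ::  array_name  !: name of the coupled array
    INTEGER(idp), INTENT(IN)     ::  required    !: required buffer size
    INTEGER(idp), INTENT(IN)     ::  available   !: available buffer size

    WRITE( message_string, * )  'buffer too small for array "',               &
                                TRIM( array_name ), '": required ', required, &
                                ' elements, available ', available
    CALL message( 'pmc_s_setind_and_allocmem', 'PA0XYZ', 3, 2, 0, 6, 0 )

 END SUBROUTINE sketch_report_buffer_overflow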
palm/trunk/SOURCE/time_integration.f90
r1763 r1764 19 19 ! Current revisions: 20 20 ! ------------------ 21 ! 21 ! PMC_ACTIVE flags removed, 22 ! nest synchronization after first call of timestep 22 23 ! 23 24 ! Former revisions: … … 238 239 USE pegrid 239 240 240 #if defined( PMC_ACTIVE )241 241 USE pmc_interface, & 242 ONLY: client_to_server, nest ing_mode,&242 ONLY: client_to_server, nested_run, nesting_mode, & 243 243 pmci_ensure_nest_mass_conservation, pmci_client_datatrans, & 244 244 pmci_client_initialize, pmci_client_synchronize, & 245 245 pmci_server_datatrans, pmci_server_initialize, & 246 246 pmci_server_synchronize, pmci_update_new, server_to_client 247 #endif248 247 249 248 USE production_e_mod, & … … 282 281 IF ( simulated_time == 0.0_wp ) CALL timestep 283 282 283 ! 284 !-- Synchronize the timestep in case of nested run. 285 !-- The server side must be called first 286 IF ( nested_run ) THEN 287 CALL pmci_server_synchronize 288 CALL pmci_client_synchronize 289 ENDIF 290 284 291 CALL run_control 285 286 292 287 293 ! … … 302 308 ENDIF 303 309 304 #if defined( PMC_ACTIVE ) 305 ! 306 !-- TO_DO: try to give more meaningful comments here 307 !-- Domain nesting: From server to client commmunication 308 !-- ( direction=SERVER_TO_CLIENT ) 309 !-- Nest initial conditions 310 ! 311 !-- Send initial condition data from server to client 312 CALL pmci_server_initialize 313 ! 314 !-- Receive and interpolate initial data on client 315 CALL pmci_client_initialize 316 ! 317 !-- TO_DO, maybe removed 318 !-- Obs. Nesting may be unnecessary at this point. 319 ! 320 !-- Nest boundary conditions 321 CALL pmci_server_datatrans( server_to_client ) 322 CALL pmci_client_datatrans( server_to_client ) 323 324 IF ( nesting_mode == 'two-way' ) THEN 325 CALL pmci_server_datatrans( client_to_server ) 326 CALL pmci_client_datatrans( client_to_server ) 327 ! 328 !-- Exchange_horiz is needed for all server-domains after the anterpolation 329 CALL exchange_horiz( u, nbgp ) 330 CALL exchange_horiz( v, nbgp ) 331 CALL exchange_horiz( w, nbgp ) 332 CALL exchange_horiz( pt, nbgp ) 333 IF ( .NOT. constant_diffusion ) CALL exchange_horiz( e, nbgp ) 334 intermediate_timestep_count = 0 335 CALL pres 310 IF ( nested_run ) THEN 311 ! 312 !-- TO_DO: try to give more meaningful comments here 313 !-- Domain nesting: From server to client commmunication 314 !-- ( direction=SERVER_TO_CLIENT ) 315 !-- Nest initial conditions 316 ! 317 !-- Send initial condition data from server to client 318 CALL pmci_server_initialize 319 ! 320 !-- Receive and interpolate initial data on client 321 CALL pmci_client_initialize 322 ! 323 !-- TO_DO, maybe removed 324 !-- Obs. Nesting may be unnecessary at this point. 325 ! 326 !-- Nest boundary conditions 327 CALL pmci_server_datatrans( server_to_client ) 328 CALL pmci_client_datatrans( server_to_client ) 329 330 IF ( nesting_mode == 'two-way' ) THEN 331 CALL pmci_server_datatrans( client_to_server ) 332 CALL pmci_client_datatrans( client_to_server ) 333 ! 334 !-- Exchange_horiz is needed for all server-domains after the anterpolation 335 CALL exchange_horiz( u, nbgp ) 336 CALL exchange_horiz( v, nbgp ) 337 CALL exchange_horiz( w, nbgp ) 338 CALL exchange_horiz( pt, nbgp ) 339 IF ( .NOT. constant_diffusion ) CALL exchange_horiz( e, nbgp ) 340 intermediate_timestep_count = 0 341 CALL pres 342 ENDIF 343 ! 344 !-- Correct the w top-BC in nest domains to ensure mass conservation. 
345 !-- Copy the interpolated/anterpolated boundary values to the _p 346 !-- arrays, too, to make sure the interpolated/anterpolated boundary 347 !-- values are carried over from one RK inner step to another. 348 !-- These actions must not be done for the root domain. 349 IF ( nest_domain ) THEN 350 CALL pmci_ensure_nest_mass_conservation 351 CALL pmci_update_new 352 ENDIF 353 336 354 ENDIF 337 !338 !-- Correct the w top-BC in nest domains to ensure mass conservation.339 !-- Copy the interpolated/anterpolated boundary values to the _p340 !-- arrays, too, to make sure the interpolated/anterpolated boundary341 !-- values are carried over from one RK inner step to another.342 !-- These actions must not be done for the root domain.343 IF ( nest_domain ) THEN344 CALL pmci_ensure_nest_mass_conservation345 CALL pmci_update_new346 ENDIF347 #endif348 355 349 356 #if defined( __dvrp_graphics ) … … 366 373 CALL timestep 367 374 368 #if defined( PMC_ACTIVE ) 369 ! 370 !-- TO_DO: try to give more detailed and meaningful comments here371 !-- Server side must be called first372 CALL pmci_server_synchronize373 CALL pmci_client_synchronize374 #endif 375 IF ( nested_run ) THEN 376 ! 377 !-- TO_DO: try to give more detailed and meaningful comments here 378 !-- Server side must be called first 379 CALL pmci_server_synchronize 380 CALL pmci_client_synchronize 381 ENDIF 375 382 ENDIF 376 383 … … 694 701 CALL swap_timelevel 695 702 696 #if defined( PMC_ACTIVE ) 697 ! 698 !-- TO_DO: try to give more meaningful comments here 699 !-- Domain nesting 700 !-- Note that the nesting operations are omitted intentionally on the 701 !-- first two RK-substeps. 702 CALL cpu_log( log_point(60), 'nesting', 'start' ) 703 ! 704 !-- From server to client commmunication ( direction=SERVER_TO_CLIENT ) 705 CALL pmci_server_datatrans( server_to_client ) 706 CALL pmci_client_datatrans( server_to_client ) 707 708 IF ( nesting_mode == 'two-way' ) THEN 709 ! 710 !-- From client to server commmunication ( direction=CLIENT_TO_SERVER ) 711 CALL pmci_server_datatrans( client_to_server ) 712 CALL pmci_client_datatrans( client_to_server ) 713 ! 714 !-- Exchange_horiz is needed for all server-domains after the 715 !-- anterpolation 716 CALL exchange_horiz( u, nbgp ) 717 CALL exchange_horiz( v, nbgp ) 718 CALL exchange_horiz( w, nbgp ) 719 CALL exchange_horiz( pt, nbgp ) 720 IF ( humidity .OR. passive_scalar ) THEN 721 CALL exchange_horiz( q, nbgp ) 722 ENDIF 723 IF ( .NOT. constant_diffusion ) CALL exchange_horiz( e, nbgp ) 724 ENDIF 725 ! 726 !-- Correct the w top-BC in nest domains to ensure mass conservation. 727 !-- This action must never be done for the root domain. 728 IF ( nest_domain ) THEN 729 CALL pmci_ensure_nest_mass_conservation 730 ! 731 !-- pmc_update_new is not necessary if nesting is made at each substep. 732 CALL pmci_update_new 733 ENDIF 734 735 CALL cpu_log( log_point(60), 'nesting', 'stop' ) 736 #endif 703 IF ( nested_run ) THEN 704 ! 705 !-- TO_DO: try to give more meaningful comments here 706 !-- Domain nesting 707 !-- Note that the nesting operations are omitted intentionally on the 708 !-- first two RK-substeps. 709 CALL cpu_log( log_point(60), 'nesting', 'start' ) 710 ! 711 !-- From server to client commmunication ( direction=SERVER_TO_CLIENT ) 712 CALL pmci_server_datatrans( server_to_client ) 713 CALL pmci_client_datatrans( server_to_client ) 714 715 IF ( nesting_mode == 'two-way' ) THEN 716 ! 
717 !-- From client to server commmunication ( direction=CLIENT_TO_SERVER ) 718 CALL pmci_server_datatrans( client_to_server ) 719 CALL pmci_client_datatrans( client_to_server ) 720 ! 721 !-- Exchange_horiz is needed for all server-domains after the 722 !-- anterpolation 723 CALL exchange_horiz( u, nbgp ) 724 CALL exchange_horiz( v, nbgp ) 725 CALL exchange_horiz( w, nbgp ) 726 CALL exchange_horiz( pt, nbgp ) 727 IF ( humidity .OR. passive_scalar ) THEN 728 CALL exchange_horiz( q, nbgp ) 729 ENDIF 730 IF ( .NOT. constant_diffusion ) CALL exchange_horiz( e, nbgp ) 731 ENDIF 732 ! 733 !-- Correct the w top-BC in nest domains to ensure mass conservation. 734 !-- This action must never be done for the root domain. 735 IF ( nest_domain ) THEN 736 CALL pmci_ensure_nest_mass_conservation 737 ! 738 !-- pmc_update_new is not necessary if nesting is made at each 739 !-- substep 740 CALL pmci_update_new 741 ENDIF 742 743 CALL cpu_log( log_point(60), 'nesting', 'stop' ) 744 745 ENDIF 737 746 738 747 ! … … 1148 1157 ! 1149 1158 !-- Output elapsed simulated time in form of a progress bar on stdout 1150 !-- TO_DO: should be done by root domain later1151 #if ! defined( PMC_ACTIVE )1152 1159 IF ( myid == 0 ) CALL output_progress_bar 1153 #endif1154 1160 1155 1161 CALL cpu_log( log_point_s(10), 'timesteps', 'stop' ) … … 1158 1164 ENDDO ! time loop 1159 1165 1160 !-- TO_DO: should be done by root domain later1161 #if ! defined( PMC_ACTIVE )1162 1166 IF ( myid == 0 ) CALL finish_progress_bar 1163 #endif1164 1167 1165 1168 #if defined( __dvrp_graphics )
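Taken together, the time_integration changes above replace the PMC_ACTIVE compile-time blocks by run-time IF ( nested_run ) blocks at two hooks: directly after each call of timestep the model time steps are unified (server side first), and on the last Runge-Kutta substep boundary data are exchanged between the grids. Condensed to the bare call sequence taken from the diff (the e and q exchanges and the cpu_log calls are omitted; the fragment is not meant to compile on its own):

!
!-- Hook 1: after CALL timestep - unify the time step, server side first
    IF ( nested_run )  THEN
       CALL pmci_server_synchronize
       CALL pmci_client_synchronize
    ENDIF

!
!-- Hook 2: after swap_timelevel on the last substep - nest data transfer
    IF ( nested_run )  THEN
!
!--    Boundary data from the server (coarse) to the client (fine) grids
       CALL pmci_server_datatrans( server_to_client )
       CALL pmci_client_datatrans( server_to_client )

       IF ( nesting_mode == 'two-way' )  THEN
!
!--       Anterpolation from client to server, followed by a ghost-point
!--       exchange on the server domains
          CALL pmci_server_datatrans( client_to_server )
          CALL pmci_client_datatrans( client_to_server )
          CALL exchange_horiz( u, nbgp )
          CALL exchange_horiz( v, nbgp )
          CALL exchange_horiz( w, nbgp )
          CALL exchange_horiz( pt, nbgp )
       ENDIF
!
!--    Nest domains only: correct the w top boundary condition for mass
!--    conservation and carry the boundary values over to the _p arrays
       IF ( nest_domain )  THEN
          CALL pmci_ensure_nest_mass_conservation
          CALL pmci_update_new
       ENDIF

    ENDIF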