Changeset 1779 for palm/trunk/SOURCE
- Timestamp:
- Mar 3, 2016 8:01:28 AM
- Location:
- palm/trunk/SOURCE
- Files:
- 13 edited
palm/trunk/SOURCE/Makefile
(r1767 → r1779)

 # Current revisions:
 # ------------------
-#
+# dependencies changed for init_pegrid
 #
 # Former revisions:
...
 init_masks.o: modules.o mod_kinds.o
 init_ocean.o: modules.o eqn_state_seawater.o mod_kinds.o
-init_pegrid.o: modules.o mod_kinds.o pmc_interface.o
+init_pegrid.o: modules.o mod_kinds.o
 init_pt_anomaly.o: modules.o mod_kinds.o
 init_rankine.o: modules.o mod_kinds.o
palm/trunk/SOURCE/check_open.f90
(r1746 → r1779)

 ! Current revisions:
 ! -----------------
-!
+! coupling_char is trimmed at every place it occurs, because it can have
+! different length now
 !
 ! Former revisions:
...
 !-- check_namelist_files!
 IF ( check_restart == 2 )  THEN
-   OPEN ( 11, FILE='PARINF'//coupling_char, FORM='FORMATTED',           &
-              STATUS='OLD' )
-ELSE
-   OPEN ( 11, FILE='PARIN'//coupling_char, FORM='FORMATTED',            &
+   OPEN ( 11, FILE='PARINF'//TRIM( coupling_char ),                     &
+              FORM='FORMATTED', STATUS='OLD' )
+ELSE
+   OPEN ( 11, FILE='PARIN'//TRIM( coupling_char ), FORM='FORMATTED',    &
               STATUS='OLD' )
 END IF
 #else
-OPEN ( 11, FILE='PARIN'//coupling_char, FORM='FORMATTED',               &
+OPEN ( 11, FILE='PARIN'//TRIM( coupling_char ), FORM='FORMATTED',       &
        STATUS='OLD' )
 #endif
...
 IF ( myid_char == '' )  THEN
-   OPEN ( 13, FILE='BININ'//coupling_char//myid_char,                   &
+   OPEN ( 13, FILE='BININ'//TRIM( coupling_char )//myid_char,           &
               FORM='UNFORMATTED', STATUS='OLD' )
 ELSE
...
 !-- only this file contains the global variables
    IF ( .NOT. openfile(file_id)%opened_before )  THEN
-      OPEN ( 13, FILE='BININ'//TRIM( coupling_char )//'/_000000',
+      OPEN ( 13, FILE='BININ'//TRIM( coupling_char )//'/_000000',       &
                  FORM='UNFORMATTED', STATUS='OLD' )
    ELSE
-      OPEN ( 13, FILE='BININ'//TRIM( coupling_char )//'/'//myid_char,   &
-                 FORM='UNFORMATTED', STATUS='OLD' )
+      OPEN ( 13, FILE='BININ'//TRIM( coupling_char )//'/'//             &
+                 myid_char, FORM='UNFORMATTED', STATUS='OLD' )
    ENDIF
 ENDIF
...
 IF ( myid_char == '' )  THEN
-   OPEN ( 14, FILE='BINOUT'//coupling_char//myid_char,                  &
+   OPEN ( 14, FILE='BINOUT'//TRIM( coupling_char )//myid_char,          &
               FORM='UNFORMATTED', POSITION='APPEND' )
 ELSE
    IF ( myid == 0  .AND.  .NOT. openfile(file_id)%opened_before )  THEN
-      CALL local_system( 'mkdir BINOUT' // coupling_char )
+      CALL local_system( 'mkdir BINOUT' // TRIM( coupling_char ) )
    ENDIF
 #if defined( __parallel ) && ! defined ( __check )
...
 CASE ( 15 )

-   OPEN ( 15, FILE='RUN_CONTROL'//coupling_char, FORM='FORMATTED' )
+   OPEN ( 15, FILE='RUN_CONTROL'//TRIM( coupling_char ),                &
+              FORM='FORMATTED' )

 CASE ( 16 )

-   OPEN ( 16, FILE='LIST_PROFIL'//coupling_char, FORM='FORMATTED' )
+   OPEN ( 16, FILE='LIST_PROFIL'//TRIM( coupling_char ),                &
+              FORM='FORMATTED' )

 CASE ( 17 )

-   OPEN ( 17, FILE='LIST_PROFIL_1D'//coupling_char, FORM='FORMATTED' )
+   OPEN ( 17, FILE='LIST_PROFIL_1D'//TRIM( coupling_char ),             &
+              FORM='FORMATTED' )

 CASE ( 18 )

-   OPEN ( 18, FILE='CPU_MEASURES'//coupling_char, FORM='FORMATTED' )
+   OPEN ( 18, FILE='CPU_MEASURES'//TRIM( coupling_char ),               &
+              FORM='FORMATTED' )

 CASE ( 19 )

-   OPEN ( 19, FILE='HEADER'//coupling_char, FORM='FORMATTED' )
+   OPEN ( 19, FILE='HEADER'//TRIM( coupling_char ), FORM='FORMATTED' )

 CASE ( 20 )

    IF ( myid == 0  .AND.  .NOT. openfile(file_id)%opened_before )  THEN
-      CALL local_system( 'mkdir DATA_LOG' // coupling_char )
+      CALL local_system( 'mkdir DATA_LOG' // TRIM( coupling_char ) )
    ENDIF
    IF ( myid_char == '' )  THEN
-      OPEN ( 20, FILE='DATA_LOG'//TRIM( coupling_char )//'/_000000',
+      OPEN ( 20, FILE='DATA_LOG'//TRIM( coupling_char )//'/_000000',    &
                  FORM='UNFORMATTED', POSITION='APPEND' )
    ELSE
...
       CALL MPI_BARRIER( comm2d, ierr )
 #endif
-      OPEN ( 20, FILE='DATA_LOG'//TRIM( coupling_char )//'/'//myid_char,&
-                 FORM='UNFORMATTED', POSITION='APPEND' )
+      OPEN ( 20, FILE='DATA_LOG'//TRIM( coupling_char )//'/'//          &
+                 myid_char, FORM='UNFORMATTED', POSITION='APPEND' )
    ENDIF
...
               FORM='UNFORMATTED', POSITION='APPEND' )
    ELSE
-      OPEN ( 21, FILE='PLOT2D_XY'//coupling_char,                       &
+      OPEN ( 21, FILE='PLOT2D_XY'//TRIM( coupling_char ),               &
                  FORM='UNFORMATTED', POSITION='APPEND' )
    ENDIF
...
    ELSE
-      OPEN ( 22, FILE='PLOT2D_XZ'//coupling_char, FORM='UNFORMATTED',   &
-                 POSITION='APPEND' )
+      OPEN ( 22, FILE='PLOT2D_XZ'//TRIM( coupling_char ),               &
+                 FORM='UNFORMATTED', POSITION='APPEND' )
    ENDIF
...
    ELSE
-      OPEN ( 23, FILE='PLOT2D_YZ'//coupling_char, FORM='UNFORMATTED',   &
-                 POSITION='APPEND' )
+      OPEN ( 23, FILE='PLOT2D_YZ'//TRIM( coupling_char ),               &
+                 FORM='UNFORMATTED', POSITION='APPEND' )
    ENDIF
...
    IF ( myid == 0  .AND.  .NOT. openfile(80)%opened_before )  THEN
-      CALL local_system( 'mkdir PARTICLE_INFOS' // coupling_char )
+      CALL local_system( 'mkdir PARTICLE_INFOS' //                      &
+                         TRIM( coupling_char ) )
    ENDIF
 #if defined( __parallel ) && ! defined ( __check )
...
 CASE ( 81 )

-   OPEN ( 81, FILE='PLOTSP_X_PAR'//coupling_char, FORM='FORMATTED',     &
-              DELIM='APOSTROPHE', RECL=1500, POSITION='APPEND' )
+   OPEN ( 81, FILE='PLOTSP_X_PAR'//TRIM( coupling_char ),               &
+              FORM='FORMATTED', DELIM='APOSTROPHE', RECL=1500,          &
+              POSITION='APPEND' )

 CASE ( 82 )

-   OPEN ( 82, FILE='PLOTSP_X_DATA'//coupling_char, FORM='FORMATTED',    &
-              POSITION = 'APPEND' )
+   OPEN ( 82, FILE='PLOTSP_X_DATA'//TRIM( coupling_char ),              &
+              FORM='FORMATTED', POSITION = 'APPEND' )

 CASE ( 83 )

-   OPEN ( 83, FILE='PLOTSP_Y_PAR'//coupling_char, FORM='FORMATTED',     &
-              DELIM='APOSTROPHE', RECL=1500, POSITION='APPEND' )
+   OPEN ( 83, FILE='PLOTSP_Y_PAR'//TRIM( coupling_char ),               &
+              FORM='FORMATTED', DELIM='APOSTROPHE', RECL=1500,          &
+              POSITION='APPEND' )

 CASE ( 84 )

-   OPEN ( 84, FILE='PLOTSP_Y_DATA'//coupling_char, FORM='FORMATTED',    &
-              POSITION='APPEND' )
+   OPEN ( 84, FILE='PLOTSP_Y_DATA'//TRIM( coupling_char ),              &
+              FORM='FORMATTED', POSITION='APPEND' )

 CASE ( 85 )
...
    IF ( myid == 0  .AND.  .NOT. openfile(85)%opened_before )  THEN
-      CALL local_system( 'mkdir PARTICLE_DATA' // coupling_char )
+      CALL local_system( 'mkdir PARTICLE_DATA' //                       &
+                         TRIM( coupling_char ) )
    ENDIF
 #if defined( __parallel ) && ! defined ( __check )
...
 !-- Set filename depending on unit number
 IF ( file_id == 101 )  THEN
-   filename = 'DATA_2D_XY_NETCDF' // coupling_char
+   filename = 'DATA_2D_XY_NETCDF' // TRIM( coupling_char )
    av = 0
 ELSE
-   filename = 'DATA_2D_XY_AV_NETCDF' // coupling_char
+   filename = 'DATA_2D_XY_AV_NETCDF' // TRIM( coupling_char )
    av = 1
 ENDIF
...
 IF ( file_id == 102 )  THEN
-   filename = 'DATA_2D_XZ_NETCDF' // coupling_char
+   filename = 'DATA_2D_XZ_NETCDF' // TRIM( coupling_char )
    av = 0
 ELSE
-   filename = 'DATA_2D_XZ_AV_NETCDF' // coupling_char
+   filename = 'DATA_2D_XZ_AV_NETCDF' // TRIM( coupling_char )
    av = 1
 ENDIF
...
 IF ( file_id == 103 )  THEN
-   filename = 'DATA_2D_YZ_NETCDF' // coupling_char
+   filename = 'DATA_2D_YZ_NETCDF' // TRIM( coupling_char )
    av = 0
 ELSE
-   filename = 'DATA_2D_YZ_AV_NETCDF' // coupling_char
+   filename = 'DATA_2D_YZ_AV_NETCDF' // TRIM( coupling_char )
    av = 1
 ENDIF
...
 !-- Set filename
-filename = 'DATA_1D_PR_NETCDF' // coupling_char
+filename = 'DATA_1D_PR_NETCDF' // TRIM( coupling_char )
...
 !-- Set filename
-filename = 'DATA_1D_TS_NETCDF' // coupling_char
+filename = 'DATA_1D_TS_NETCDF' // TRIM( coupling_char )
...
 IF ( file_id == 106 )  THEN
-   filename = 'DATA_3D_NETCDF' // coupling_char
+   filename = 'DATA_3D_NETCDF' // TRIM( coupling_char )
    av = 0
 ELSE
-   filename = 'DATA_3D_AV_NETCDF' // coupling_char
+   filename = 'DATA_3D_AV_NETCDF' // TRIM( coupling_char )
    av = 1
 ENDIF
...
 !-- Set filename
-filename = 'DATA_1D_SP_NETCDF' // coupling_char
+filename = 'DATA_1D_SP_NETCDF' // TRIM( coupling_char )
...
 IF ( myid_char == '' )  THEN
-   filename = 'DATA_PRT_NETCDF' // coupling_char
+   filename = 'DATA_PRT_NETCDF' // TRIM( coupling_char )
 ELSE
    filename = 'DATA_PRT_NETCDF' // TRIM( coupling_char ) // '/' //      &
...
 !-- Set filename
-filename = 'DATA_1D_PTS_NETCDF' // coupling_char
+filename = 'DATA_1D_PTS_NETCDF' // TRIM( coupling_char )
...
 CASE ( 117 )

-   OPEN ( 117, FILE='PROGRESS'//coupling_char, STATUS='REPLACE', FORM='FORMATTED' )
+   OPEN ( 117, FILE='PROGRESS'//TRIM( coupling_char ),                  &
+               STATUS='REPLACE', FORM='FORMATTED' )
...
    mid = file_id - 200
    WRITE ( mask_char,'(I2.2)')  mid
-   filename = 'DATA_MASK_' // mask_char // '_NETCDF' // coupling_char
+   filename = 'DATA_MASK_' // mask_char // '_NETCDF' //                 &
+              TRIM( coupling_char )
    av = 0
 ELSE
    mid = file_id - 300
    WRITE ( mask_char,'(I2.2)')  mid
    filename = 'DATA_MASK_' // mask_char // '_AV_NETCDF' //              &
-              coupling_char
+              TRIM( coupling_char )
    av = 1
 ENDIF
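Why TRIM is needed everywhere above: a fixed-length CHARACTER is blank-padded, so once coupling_char can be shorter than its declared length, concatenating it untrimmed embeds blanks inside the file name. A minimal standalone sketch, illustrative and not part of the changeset:

program trim_demo
!
!-- Minimal sketch (not PALM code): a CHARACTER(LEN=3) holding only '_2'
!-- is padded to '_2 '; without TRIM the blank ends up in the file name.
   implicit none
   character(len=3) :: coupling_char

   coupling_char = '_2'
   print *, 'without TRIM: "' // 'PARIN' // coupling_char         // '"'
   print *, 'with    TRIM: "' // 'PARIN' // trim( coupling_char ) // '"'
end program trim_demo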
palm/trunk/SOURCE/init_grid.f90
(r1763 → r1779)

 ! Current revisions:
 ! -----------------
-!
+! coupling_char is trimmed at every place it occurs, because it can have
+! different length now
 !
 ! Former revisions:
...
 ! 1069 2012-11-28 16:18:43Z maronga
-! bugfix: added coupling_char to TOPOGRAPHY_DATA to allow topography in the ocean
-! model in case of coupled runs
+! bugfix: added coupling_char to TOPOGRAPHY_DATA to allow topography in the
+! ocean model in case of coupled runs
 !
 ! 1036 2012-10-22 13:43:42Z raasch
...
 !-- Arbitrary irregular topography data in PALM format (exactly
 !-- matching the grid size and total domain size)
-OPEN( 90, FILE='TOPOGRAPHY_DATA'//coupling_char, STATUS='OLD',          &
-          FORM='FORMATTED', ERR=10 )
+OPEN( 90, FILE='TOPOGRAPHY_DATA'//TRIM( coupling_char ),                &
+          STATUS='OLD', FORM='FORMATTED', ERR=10 )
 DO  j = ny, 0, -1
    READ( 90, *, ERR=11, END=11 )  ( topo_height(j,i), i = 0,nx )
...
 GOTO 12

-10 message_string = 'file TOPOGRAPHY'//coupling_char//' does not exist'
+10 message_string = 'file TOPOGRAPHY'//TRIM( coupling_char )//          &
+                    ' does not exist'
 CALL message( 'init_grid', 'PA0208', 1, 2, 0, 6, 0 )

-11 message_string = 'errors in file TOPOGRAPHY_DATA'//coupling_char
+11 message_string = 'errors in file TOPOGRAPHY_DATA'//                  &
+                    TRIM( coupling_char )
 CALL message( 'init_grid', 'PA0209', 1, 2, 0, 6, 0 )
palm/trunk/SOURCE/init_pegrid.f90
(r1765 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! changes regarding nested domain removed: virtual PE grid will be automatically
+! calculated for nested runs too
 !
 ! Former revisions:
...
 USE pegrid

-USE pmc_interface,                                                      &
-    ONLY:  cpl_npex, cpl_npey, nested_run
-
 USE transpose_indices,                                                  &
     ONLY:  nxl_y, nxl_yd, nxl_z, nxr_y, nxr_yd, nxr_z, nyn_x, nyn_z, nys_x, ...
...
            .FALSE. )

-   IF ( nested_run )  THEN
-!
-!--   In case of nested-domain runs, the processor grid is explicitly given
-!--   by the user in the nestpar-NAMELIST
-      pdims(1) = cpl_npex
-      pdims(2) = cpl_npey
-
-   ELSE
-!
-!--   Determine the processor topology or check it, if prescribed by the user
-      IF ( npex == -1  .AND.  npey == -1 )  THEN
-
-!
-!--      Automatic determination of the topology
-         numproc_sqr = SQRT( REAL( numprocs, KIND=wp ) )
-         pdims(1)    = MAX( numproc_sqr , 1 )
-         DO  WHILE ( MOD( numprocs , pdims(1) ) /= 0 )
-            pdims(1) = pdims(1) - 1
-         ENDDO
-         pdims(2) = numprocs / pdims(1)
-
-      ELSEIF ( npex /= -1  .AND.  npey /= -1 )  THEN
-
-!
-!--      Prescribed by user. Number of processors on the prescribed topology
-!--      must be equal to the number of PEs available to the job
-         IF ( ( npex * npey ) /= numprocs )  THEN
-            WRITE( message_string, * ) 'number of PEs of the prescribed ',  &
-               'topology (', npex*npey,') does not match & the number of ', &
-               'PEs available to the job (', numprocs, ')'
-            CALL message( 'init_pegrid', 'PA0221', 1, 2, 0, 6, 0 )
-         ENDIF
-         pdims(1) = npex
-         pdims(2) = npey
-
-      ELSE
-!
-!--      If the processor topology is prescribed by the user, the number of
-!--      PEs must be given in both directions
-         message_string = 'if the processor topology is prescribed by th' // &
-            'e user& both values of "npex" and "npey" must be given' //      &
-            ' in the &NAMELIST-parameter file'
-         CALL message( 'init_pegrid', 'PA0222', 1, 2, 0, 6, 0 )
-
-      ENDIF
-
-   ENDIF
+!
+!-- Determine the processor topology or check it, if prescribed by the user
+   IF ( npex == -1  .AND.  npey == -1 )  THEN
+
+!
+!--   Automatic determination of the topology
+      numproc_sqr = SQRT( REAL( numprocs, KIND=wp ) )
+      pdims(1)    = MAX( numproc_sqr , 1 )
+      DO  WHILE ( MOD( numprocs , pdims(1) ) /= 0 )
+         pdims(1) = pdims(1) - 1
+      ENDDO
+      pdims(2) = numprocs / pdims(1)
+
+   ELSEIF ( npex /= -1  .AND.  npey /= -1 )  THEN
+
+!
+!--   Prescribed by user. Number of processors on the prescribed topology
+!--   must be equal to the number of PEs available to the job
+      IF ( ( npex * npey ) /= numprocs )  THEN
+         WRITE( message_string, * ) 'number of PEs of the prescribed ',  &
+            'topology (', npex*npey,') does not match & the number of ', &
+            'PEs available to the job (', numprocs, ')'
+         CALL message( 'init_pegrid', 'PA0221', 1, 2, 0, 6, 0 )
+      ENDIF
+      pdims(1) = npex
+      pdims(2) = npey
+
+   ELSE
+!
+!--   If the processor topology is prescribed by the user, the number of
+!--   PEs must be given in both directions
+      message_string = 'if the processor topology is prescribed by th' // &
+         'e user& both values of "npex" and "npey" must be given' //      &
+         ' in the &NAMELIST-parameter file'
+      CALL message( 'init_pegrid', 'PA0222', 1, 2, 0, 6, 0 )
+
+   ENDIF

 !
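The automatic branch above chooses the most nearly square factorization of the available PEs. The same loop, reduced to a minimal standalone sketch (illustrative, with numprocs hard-coded as an assumption):

program topology_demo
!
!-- Minimal sketch (not PALM code): find the 2-D processor grid closest to
!-- a square by starting at SQRT(numprocs) and decrementing pdims(1) until
!-- it divides numprocs evenly.
   implicit none
   integer :: numprocs = 24        ! assumption: 24 PEs, for illustration
   integer :: numproc_sqr
   integer :: pdims(2)

   numproc_sqr = sqrt( real( numprocs ) )
   pdims(1)    = max( numproc_sqr, 1 )
   do while ( mod( numprocs, pdims(1) ) /= 0 )
      pdims(1) = pdims(1) - 1
   enddo
   pdims(2) = numprocs / pdims(1)

   print '(A,I0,A,I0)', 'virtual PE grid: ', pdims(1), ' x ', pdims(2)
end program topology_demo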
palm/trunk/SOURCE/interaction_droplets_ptq.f90
(r1683 → r1779)

 ! Current revisions:
 ! -----------------
-!
+! module procedure names shortened to avoid Intel compiler warnings about too
+! long names
 !
 ! Former revisions:
...
 INTERFACE interaction_droplets_ptq
-   MODULE PROCEDURE interaction_droplets_ptq
-   MODULE PROCEDURE interaction_droplets_ptq_ij
+!
+!--    Internal names shortened in order ro avoid Intel compiler messages
+!--    about too long names
+   MODULE PROCEDURE i_droplets_ptq
+   MODULE PROCEDURE i_droplets_ptq_ij
 END INTERFACE interaction_droplets_ptq
...
 !> Call for all grid points
 !------------------------------------------------------------------------------!
-SUBROUTINE interaction_droplets_ptq
+SUBROUTINE i_droplets_ptq

 USE arrays_3d,                                                          &
...
 ENDDO

-END SUBROUTINE interaction_droplets_ptq
+END SUBROUTINE i_droplets_ptq
...
 !> Call for grid point i,j
 !------------------------------------------------------------------------------!
-SUBROUTINE interaction_droplets_ptq_ij( i, j )
+SUBROUTINE i_droplets_ptq_ij( i, j )

 USE arrays_3d,                                                          &
...
 ENDDO

-END SUBROUTINE interaction_droplets_ptq_ij
+END SUBROUTINE i_droplets_ptq_ij

 END MODULE interaction_droplets_ptq_mod
palm/trunk/SOURCE/modules.f90
(r1765 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! coupling_char extended to LEN=3
 !
 ! Former revisions:
...
 CHARACTER (LEN=1) ::  cycle_mg = 'w', timestep_reason = ' '
-CHARACTER (LEN=2) ::  coupling_char = ''
+CHARACTER (LEN=3) ::  coupling_char = ''
 CHARACTER (LEN=5) ::  write_binary = 'false'
 CHARACTER (LEN=8) ::  most_method = 'lookup', &  !< NAMELIST parameter defining method to be used to calculate Okukhov length,
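LEN=3 matches the coupling_char format now written in pmc_interface.f90 (see below): an underscore plus a zero-padded two-digit domain id, so ids up to 99 fit. A standalone sketch, illustrative only:

program coupling_char_demo
!
!-- Minimal sketch (not PALM code): '(A1,I2.2)' writes '_' plus a
!-- zero-padded two-digit id, which needs three characters in total.
   implicit none
   character(len=3) :: coupling_char = ''
   integer          :: cpl_id = 2     ! assumption: second (nested) domain

   write( coupling_char, '(A1,I2.2)' )  '_', cpl_id
   print *, 'coupling_char = "' // trim( coupling_char ) // '"'   ! "_02"
end program coupling_char_demo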
palm/trunk/SOURCE/palm.f90
(r1765 → r1779)

 ! Current revisions:
 ! -----------------
-!
+! setting of nest_domain and coupling_char moved to the pmci
 !
 ! Former revisions:
...
 !-- be changed in init_pegrid).
    IF ( nested_run )  THEN
-!--    TO_DO: move the following two settings somewehere to the pmc_interface
-      IF ( cpl_id >= 2 )  THEN
-         nest_domain = .TRUE.
-         WRITE( coupling_char, '(A1,I1.1)')  '_', cpl_id
-      ENDIF

       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
palm/trunk/SOURCE/pmc_client.f90
(r1765 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! kind=dp replaced by wp, dim_order removed
+! array management changed from linked list to sequential loop
 !
 ! Former revisions:
...
 USE kinds
 USE PMC_general, ONLY: ClientDef, DA_NameDef, DA_Namelen, PMC_STATUS_OK, PMC_DA_NAME_ERR, PeDef, ArrayDef, &
-                       DA_Desclen, DA_Namelen, PMC_G_SetName, PMC_G_GetName
+                       DA_Desclen, DA_Namelen, PMC_G_SetName, PMC_MAX_ARRAY
 USE PMC_handle_communicator, ONLY: m_model_comm,m_model_rank,m_model_npes, m_to_server_comm
...
 SAVE

-! data local to this MODULE
 Type(ClientDef) :: me
-!-- TO_DO: what is the meaning of this? Could variables declared in this module
-!-- also have single precision?
-! INTEGER, PARAMETER :: dp = wp
-
-INTEGER, save :: myIndex = 0            !Counter and unique number for Data Arrays
+
+INTEGER :: next_array_in_list = 0
+INTEGER :: myIndex = 0                  !Counter and unique number for Data Arrays
...
 END INTERFACE PMC_C_Get_2D_index_list

+INTERFACE PMC_C_clear_next_array_list
+   MODULE procedure PMC_C_clear_next_array_list
+END INTERFACE PMC_C_clear_next_array_list
+
 INTERFACE PMC_C_GetNextArray
    MODULE procedure PMC_C_GetNextArray
...
 PUBLIC PMC_ClientInit , PMC_Set_DataArray_Name, PMC_C_Get_2D_index_list
-PUBLIC PMC_C_GetNextArray, PMC_C_Set_DataArray
-PUBLIC PMC_C_setInd_and_AllocMem , PMC_C_GetBuffer, PMC_C_PutBuffer ! ,PMC_C_GetServerType
+PUBLIC PMC_C_GetNextArray, PMC_C_Set_DataArray, PMC_C_clear_next_array_list
+PUBLIC PMC_C_setInd_and_AllocMem , PMC_C_GetBuffer, PMC_C_PutBuffer
...
 CALL MPI_Intercomm_merge (me%inter_comm, .true., me%intra_comm, istat);
 CALL MPI_Comm_rank (me%intra_comm, me%intra_rank, istat);
-
 ALLOCATE (me%PEs(me%inter_npes))

+!
+!-- Allocate for all Server PEs an array of TYPE ArrayDef to store information of transfer array
 do i=1,me%inter_npes
-   NULLIFY(me%PEs(i)%Arrays)
+   ALLOCATE(me%PEs(i)%array_list(PMC_MAX_ARRAY))
 end do
...
-SUBROUTINE PMC_Set_DataArray_Name (ServerArrayDesc, ServerArrayName, ClientArrayDesc, ClientArrayName, istat, LastEntry)
+SUBROUTINE PMC_Set_DataArray_Name (ServerArrayDesc, ServerArrayName, ClientArrayDesc, ClientArrayName, istat)
    IMPLICIT none
    character(len=*),INTENT(IN) :: ServerArrayName
...
    character(len=*),INTENT(IN) :: ClientArrayDesc
    INTEGER,INTENT(OUT)         :: istat
-   LOGICAL,INTENT(IN),optional :: LastEntry

 !-- local variables
...
    CALL PMC_Bcast ( myName%NameOnClient, myPE, comm=m_to_server_comm)

-   if(present (LastEntry)) then
-      CALL PMC_Set_DataArray_Name_LastEntry ( LastEntry = LastEntry)
-   end if
-
    CALL PMC_G_SetName (me, myName%couple_index, myName%NameOnClient)
...
    INTEGER(KIND=MPI_ADDRESS_KIND)     :: disp    !: Displacement Unit (Integer = 4, floating poit = 8
    INTEGER,DIMENSION(me%inter_npes*2) :: NrEle   !: Number of Elements of a horizontal slice
-   TYPE(PeDef),POINTER                :: aPE     !: Pointer to PeDef strzcture
+   TYPE(PeDef),POINTER                :: aPE     !: Pointer to PeDef structure
    INTEGER(KIND=MPI_ADDRESS_KIND)     :: WinSize !: Size of MPI window 2 (in bytes)
    INTEGER,DIMENSION(:),POINTER       :: myInd
...
 END SUBROUTINE PMC_C_Get_2D_index_list

+SUBROUTINE PMC_C_clear_next_array_list
+   IMPLICIT none
+
+   next_array_in_list = 0
+
+   return
+END SUBROUTINE PMC_C_clear_next_array_list
+
+! List handling is still required to get minimal interaction with pmc_interface
 LOGICAL function PMC_C_GetNextArray (myName)
    character(len=*),INTENT(OUT) :: myName

 !-- local variables
-   INTEGER                   :: MyCoupleIndex
-   LOGICAL                   :: MyLast          !Last Array in List
-   character(len=DA_Namelen) :: loName
-
-   loName = 'NoName '
-   MyLast = .true.
-
-   CALL PMC_G_GetName (me, MyCoupleIndex, loName, MyLast)
-
-   myName = trim(loName)
-
-   PMC_C_GetNextArray = .NOT. MyLast            ! Return true if valid array
-
-   return
+   TYPE(PeDef),POINTER    :: aPE
+   TYPE(ArrayDef),POINTER :: ar
+
+   next_array_in_list = next_array_in_list+1
+
+!-- Array Names are the same on all client PE, so take first PE to get the name
+   aPE => me%PEs(1)
+
+   if(next_array_in_list > aPE%Nr_arrays) then
+      PMC_C_GetNextArray = .false.              !all arrays done
+      return
+   end if
+
+   ar => aPE%array_list(next_array_in_list)
+
+   myName = ar%name
+
+   PMC_C_GetNextArray = .true.                  ! Return true if legal array
+   return
 END function PMC_C_GetNextArray

 SUBROUTINE PMC_C_Set_DataArray_2d (array)
+
    IMPLICIT none
-!-- TO_DO: is double precision absolutely required here?
-   REAL(kind=dp),INTENT(IN),DIMENSION(:,:) :: array
-!-- local variables
-   INTEGER                :: NrDims
-   INTEGER,DIMENSION (4)  :: dims
-   INTEGER                :: dim_order
-   TYPE(c_ptr)            :: array_adr
-   INTEGER                :: i
-   TYPE(PeDef),POINTER    :: aPE
-   TYPE(ArrayDef),POINTER :: ar
+
+   REAL(wp), INTENT(IN) ,DIMENSION(:,:) :: array
+
+   INTEGER                :: NrDims
+   INTEGER,DIMENSION (4)  :: dims
+   TYPE(c_ptr)            :: array_adr
+   INTEGER                :: i
+   TYPE(PeDef),POINTER    :: aPE
+   TYPE(ArrayDef),POINTER :: ar
...
    dims(1) = size(array,1)
    dims(2) = size(array,2)
-   dim_order = 2

    array_adr = c_loc(array)

    do i=1,me%inter_npes
       aPE => me%PEs(i)
-      ar => aPE%Arrays
+      ar => aPE%array_list(next_array_in_list)   !actual array is last array in list
       ar%NrDims = NrDims
       ar%A_dim  = dims
-      ar%dim_order = dim_order
       ar%data   = array_adr
    end do
...
 SUBROUTINE PMC_C_Set_DataArray_3d (array)
+
    IMPLICIT none
-!-- TO_DO: is double precision absolutely required here?
-   REAL(kind=dp),INTENT(IN),DIMENSION(:,:,:) :: array
-!-- local variables
-   INTEGER                :: NrDims
-   INTEGER,DIMENSION (4)  :: dims
-   INTEGER                :: dim_order
-   TYPE(c_ptr)            :: array_adr
-   INTEGER                :: i
-   TYPE(PeDef),POINTER    :: aPE
-   TYPE(ArrayDef),POINTER :: ar
+
+   REAL(wp),INTENT(IN),DIMENSION(:,:,:) :: array
+
+   INTEGER                :: NrDims
+   INTEGER,DIMENSION (4)  :: dims
+   TYPE(c_ptr)            :: array_adr
+   INTEGER                :: i
+   TYPE(PeDef),POINTER    :: aPE
+   TYPE(ArrayDef),POINTER :: ar

    dims = 1
...
    dims(2) = size(array,2)
    dims(3) = size(array,3)
-   dim_order =33

    array_adr = c_loc(array)

    do i=1,me%inter_npes
       aPE => me%PEs(i)
-      ar => aPE%Arrays
+      ar => aPE%array_list(next_array_in_list)   !actual array is last array in list
       ar%NrDims = NrDims
       ar%A_dim  = dims
-      ar%dim_order = dim_order
       ar%data   = array_adr
    end do
...
-   INTEGER :: i, ierr
+   INTEGER :: i, ierr, j
    INTEGER :: arlen, myIndex, tag
    INTEGER(idp) :: bufsize                      ! Size of MPI data Window
...
    tag = 200

-   do while (PMC_C_GetNextArray (myName))
-      ar => aPE%Arrays
+   do j=1,aPE%Nr_arrays
+      ar => aPE%array_list(j)

 !     Receive Index from client
       CALL MPI_Recv (myIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm, MPI_STATUS_IGNORE, ierr)

-      if(ar%dim_order == 33) then               ! PALM has k in first dimension
+      if(ar%NrDims == 3) then                   ! PALM has k in first dimension
          bufsize = max(bufsize,ar%A_dim(1)*ar%A_dim(2)*ar%A_dim(3))   ! determine max, because client buffer is allocated only once
       else
...
       aPE => me%PEs(i)

-      do while (PMC_C_GetNextArray (myName))
-         ar => aPE%Arrays
+      do j=1,aPE%Nr_arrays
+         ar => aPE%array_list(j)
          ar%SendBuf = base_ptr
       end do
...
 SUBROUTINE PMC_C_GetBuffer (WaitTime)
+
    IMPLICIT none
-   REAL(kind=dp),INTENT(OUT),optional :: WaitTime
+
+   REAL(wp), INTENT(OUT), optional :: WaitTime

 !-- local variables
-   INTEGER                                 :: ip, ij, ierr
-   REAL(kind=dp)                           :: t1,t2
-   REAL(kind=wp),POINTER,DIMENSION(:)      :: buf
-   REAL(kind=wp),POINTER,DIMENSION(:,:)    :: data_2d
-   REAL(kind=wp),POINTER,DIMENSION(:,:,:)  :: data_3d
+   INTEGER                             :: ip, ij, ierr, j
+   REAL(wp)                            :: t1,t2
+   REAL(wp),POINTER,DIMENSION(:)       :: buf
+   REAL(wp),POINTER,DIMENSION(:,:)     :: data_2d
+   REAL(wp),POINTER,DIMENSION(:,:,:)   :: data_3d
...
    t1 = PMC_Time()
    CALL MPI_Barrier(me%intra_comm, ierr)        ! Wait for server to fill buffer
-   t2 = PMC_Time()
-   if(present(WaitTime)) WaitTime = t2-t1
+   t2 = PMC_Time()-t1
+   if(present(WaitTime)) WaitTime = t2

    CALL MPI_Barrier(me%intra_comm, ierr)        ! Wait for buffer is filled
...
       aPE => me%PEs(ip)

-      do while (PMC_C_GetNextArray (myName))
-         ar => aPE%Arrays
-         if(ar%dim_order == 2) then
+      do j=1,aPE%Nr_arrays
+         ar => aPE%array_list(j)
+         if(ar%NrDims == 2) then
             nr = aPE%NrEle
-         else if(ar%dim_order == 33) then
+         else if(ar%NrDims == 3) then
             nr = aPE%NrEle*ar%A_dim(1)
          end if

          buf_shape(1) = nr
          CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
-
+!
+!--      MPI passive target RMA
          if(nr > 0) then
             target_disp = (ar%BufIndex-1)
...
          myIndex = 1
-         if(ar%dim_order == 2) then
+         if(ar%NrDims == 2) then

             CALL c_f_pointer(ar%data, data_2d, ar%A_dim(1:2))
...
                myIndex = myIndex+1
             end do
-         else if(ar%dim_order == 33) then
+         else if(ar%NrDims == 3) then
             CALL c_f_pointer(ar%data, data_3d, ar%A_dim(1:3))
             do ij=1,aPE%NrEle
...
 SUBROUTINE PMC_C_PutBuffer (WaitTime)
+
    IMPLICIT none
-   REAL(kind=dp),INTENT(OUT),optional :: WaitTime
+
+   REAL(wp), INTENT(OUT), optional :: WaitTime

 !-- local variables
-   INTEGER                                 :: ip, ij, ierr
-   REAL(kind=dp)                           :: t1,t2
-   REAL(kind=wp),POINTER,DIMENSION(:)      :: buf
-   REAL(kind=wp),POINTER,DIMENSION(:,:)    :: data_2d
-   REAL(kind=wp),POINTER,DIMENSION(:,:,:)  :: data_3d
+   INTEGER                             :: ip, ij, ierr, j
+   REAL(wp)                            :: t1,t2
+   REAL(wp),POINTER,DIMENSION(:)       :: buf
+   REAL(wp),POINTER,DIMENSION(:,:)     :: data_2d
+   REAL(wp),POINTER,DIMENSION(:,:,:)   :: data_3d
...
    do ip=1,me%inter_npes
       aPE => me%PEs(ip)

-      do while (PMC_C_GetNextArray (myName))
-         ar => aPE%Arrays
-         if(ar%dim_order == 2) then
+      do j=1,aPE%Nr_arrays
+         ar => aPE%array_list(j)
+         if(ar%NrDims == 2) then
             nr = aPE%NrEle
-         else if(ar%dim_order == 33) then
+         else if(ar%NrDims == 3) then
             nr = aPE%NrEle*ar%A_dim(1)
          end if
...
          myIndex = 1
-         if(ar%dim_order == 2) then
+         if(ar%NrDims == 2) then
             CALL c_f_pointer(ar%data, data_2d, ar%A_dim(1:2))
...
                myIndex = myIndex+1
             end do
-         else if(ar%dim_order == 33) then
+         else if(ar%NrDims == 3) then
             CALL c_f_pointer(ar%data, data_3d, ar%A_dim(1:3))
             do ij=1,aPE%NrEle
...
          end if
-
+!
+!--      MPI passiv target RMA
          if(nr > 0) then
             target_disp = (ar%BufIndex-1)
palm/trunk/SOURCE/pmc_general.f90
(r1767 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! PMC_MPI_REAL removed, dim_order removed from type arraydef,
+! array management changed from linked list to sequential loop
 !
 ! Former revisions:
...
 INTEGER,parameter,PUBLIC :: PMC_DA_NAME_ERR  = 10

+INTEGER,parameter,PUBLIC :: PMC_MAX_ARRAY    = 32  !Max Number of Array which can be coupled
 INTEGER,parameter,PUBLIC :: PMC_MAX_MODELL   = 64
-! TO_DO: the next variable doesn't seem to be used. Remove?
-INTEGER,parameter,PUBLIC :: PMC_MPI_REAL     = MPI_DOUBLE_PRECISION
 INTEGER,parameter,PUBLIC :: DA_Desclen       = 8
 INTEGER,parameter,PUBLIC :: DA_Namelen       = 16
...
    INTEGER              :: NrDims        ! Number of Dimensions
    INTEGER,DIMENSION(4) :: A_dim         ! Size of dimensions
-   INTEGER              :: dim_order     ! Order of Dimensions: 2 = 2D array, 33 = 3D array
    TYPE(c_ptr)          :: data          ! Pointer of data in server space
    TYPE(c_ptr), DIMENSION(2) :: po_data  ! Base Pointers, PMC_S_Set_Active_data_array sets active pointer
...
 TYPE, PUBLIC :: PeDef
-   INTEGER(idp)                           :: NrEle        ! Number of Elemets
-   TYPE (xy_ind), POINTER,DIMENSION(:)    :: locInd       ! xy index local array for remote PE
-   TYPE( ArrayDef), POINTER               :: Arrays       ! Pointer to Data Array List (Type ArrayDef)
-   TYPE( ArrayDef), POINTER               :: ArrayStart   ! Pointer to Star of the List
+   INTEGER                                :: Nr_arrays=0  ! Number of arrays which will be transfered in this run
+   INTEGER                                :: NrEle        ! Number of Elemets, same for all arrays
+   TYPE (xy_ind), POINTER,DIMENSION(:)    :: locInd       ! xy index local array for remote PE
+   TYPE( ArrayDef), POINTER, DIMENSION(:) :: array_list   ! List of Data Arrays to be transfered
 END TYPE PeDef
...
 end INTERFACE PMC_G_SetName

-INTERFACE PMC_G_GetName
-   MODULE procedure PMC_G_GetName
-end INTERFACE PMC_G_GetName
-
 INTERFACE PMC_sort
    MODULE procedure sort_2d_i
 end INTERFACE PMC_sort

-PUBLIC PMC_G_SetName, PMC_G_GetName, PMC_sort
+PUBLIC PMC_G_SetName, PMC_sort
...
    TYPE(PeDef),POINTER :: aPE

+!
+!-- Assign array to next free index in array_list.
+!-- Set name of array in ArrayDef structure
    do i=1,myClient%inter_npes
       aPE => myClient%PEs(i)
-      ar => aPE%Arrays
-      if(.not. associated (ar) ) then
-         ar => DA_List_append (ar, couple_index)
-         aPE%ArrayStart => ar
-      else
-         ar => DA_List_append (ar, couple_index)
-      endif
-      Ar%Name = trim(aName) // " "
-      myClient%PEs(i)%Arrays => ar
+      aPE%Nr_arrays = aPE%Nr_arrays+1
+      aPE%array_list(aPE%Nr_arrays)%name        = aName
+      aPE%array_list(aPE%Nr_arrays)%coupleIndex = couple_index
    end do

 end SUBROUTINE PMC_G_SetName

-SUBROUTINE PMC_G_GetName (myClient, couple_index, aName, aLast, Client_PeIndex)
-   IMPLICIT none
-
-   TYPE(ClientDef),INTENT(INOUT) :: myClient
-   INTEGER,INTENT(OUT)           :: couple_index
-   CHARACTER(LEN=*),INTENT(OUT)  :: aName
-   logical,INTENT(OUT)           :: aLast
-   INTEGER,INTENT(IN),optional   :: Client_PeIndex
-
-   INTEGER                       :: i,istart,istop
-   TYPE(PeDef),POINTER           :: aPE
-   TYPE(ArrayDef),POINTER        :: ar
-   logical,save                  :: first=.true.
-
-   aLast = .false.
-
-   if(present(Client_PeIndex)) then   !Loop over all Client PEs or just one selected via Client_PeIndex
-      istart = Client_PeIndex
-      istop  = Client_PeIndex
-   else
-      istart = 1
-      istop  = myClient%inter_npes
-   end if
-
-   do i=istart,istop
-      aPE => myClient%PEs(i)
-      ar => aPE%Arrays
-      if(first) then
-         ar => aPE%ArrayStart
-      else
-         ar => aPE%Arrays
-         ar => DA_List_next (ar)
-         if(.not. associated (ar) ) then
-            aLast = .true.
-            first = .true.            !Reset linked list to begin
-            aPE%Arrays => ar
-         end if
-      endif
-      aPE%Arrays => ar
-   end do
-   if(aLast) then
-      return
-   end if
-
-   couple_index = ar%coupleIndex
-   aName = ar%Name
-   aLast = .false.
-
-   first = .false.
-
-   return
-END SUBROUTINE PMC_G_GetName

 SUBROUTINE sort_2d_i (array,sort_ind)
...
 END SUBROUTINE sort_2d_i

-! Private section
-! linked List routines for Data Array handling
-
-FUNCTION DA_List_append (node, couple_index)
-   TYPE(ArrayDef),POINTER :: DA_List_append
-   TYPE(ArrayDef),POINTER :: node
-   INTEGER,INTENT(IN)     :: couple_index
-
-!-- local variables
-   TYPE(ArrayDef),POINTER :: ar
-
-   if(.not. associated (node)) then
-      ALLOCATE(ar)
-      ar%coupleIndex = couple_index
-      NULLIFY(ar%next)
-      DA_List_append => ar
-   else
-      ALLOCATE(node%next)
-      node%next%coupleIndex = couple_index
-      NULLIFY(node%next%next)
-      DA_List_append => node%next
-   end if
-
-   return
-END FUNCTION DA_List_append
-
-FUNCTION DA_List_next (node)
-   TYPE(ArrayDef),POINTER :: DA_List_next
-   TYPE(ArrayDef),POINTER :: node
-
-   DA_List_next => node%next
-
-   return
-END FUNCTION DA_List_next

 #endif
 end MODULE pmc_general
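The pattern replacing the linked list: a pre-allocated descriptor array plus a counter that the new PMC_*_clear_next_array_list routines reset. Reduced to a standalone sketch (names mirror the PMC types, the code itself is illustrative):

program array_list_demo
!
!-- Minimal sketch (not PMC code): iterate over a pre-allocated array of
!-- descriptors with a counter instead of walking a linked list.
   implicit none
   integer, parameter :: pmc_max_array = 32
   type :: arraydef
      character(len=16) :: name
   end type arraydef
   type(arraydef) :: array_list(pmc_max_array)
   integer        :: nr_arrays = 0
   integer        :: next_array_in_list = 0

!-- register two arrays (roughly what PMC_G_SetName now does)
   nr_arrays = nr_arrays + 1;  array_list(nr_arrays)%name = 'u'
   nr_arrays = nr_arrays + 1;  array_list(nr_arrays)%name = 'v'

!-- reset and loop (roughly what clear_next_array_list / GetNextArray do)
   next_array_in_list = 0
   do
      next_array_in_list = next_array_in_list + 1
      if ( next_array_in_list > nr_arrays )  exit
      print *, 'array: ', trim( array_list(next_array_in_list)%name )
   enddo
end program array_list_demo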
palm/trunk/SOURCE/pmc_handle_communicator.f90
(r1765 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! only the total number of PEs is given in the nestpar-NAMELIST,
+! additional comments included
 !
 ! Former revisions:
...
    INTEGER :: id
    INTEGER :: parent_id
-   INTEGER :: npe_x
-   INTEGER :: npe_y
+   INTEGER :: npe_total

    REAL(wp) :: lower_left_x
...
 start_pe(1) = 0
 DO  i = 2, m_nrofcpl+1
-   start_pe(i) = start_pe(i-1) +                                        &
-                 m_couplers(i-1)%npe_x * m_couplers(i-1)%npe_y
+   start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
 ENDDO

 !
 !-- The number of cores provided with the run must be the same as the
 !-- total sum of cores required by all nest domains
-!-- TO_DO: can we use > instead of /= ?
 IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
-!-- TO_DO: this IF statement is redundant
-   IF ( m_world_rank == 0 )  THEN
-      WRITE ( message_string, '(A,I6,A,I6,A)' )                         &
-                      'nesting-setup requires more MPI procs (',        &
-                      start_pe(m_nrofcpl+1), ') than provided (',       &
-                      m_world_npes,')'
-      CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
-   ENDIF
+   WRITE ( message_string, '(A,I6,A,I6,A)' )                            &
+                   'nesting-setup requires more MPI procs (',           &
+                   start_pe(m_nrofcpl+1), ') than provided (',          &
+                   m_world_npes,')'
+   CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
 ENDIF
...
 CALL MPI_BCAST( m_couplers(i)%id, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
 CALL MPI_BCAST( m_couplers(i)%Parent_id, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
-CALL MPI_BCAST( m_couplers(i)%npe_x, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
-CALL MPI_BCAST( m_couplers(i)%npe_y, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
+CALL MPI_BCAST( m_couplers(i)%npe_total, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
 CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL, 0, MPI_COMM_WORLD, istat )
 CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL, 0, MPI_COMM_WORLD, istat )
...
 CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,  &
                      istat )
-IF ( istat /= MPI_SUCCESS )  THEN
-!
-!-- TO_DO: replace by message-call
-!-- TO_DO: Can this really happen, or is this just for the debugging phase?
-   IF ( m_world_rank == 0 )  WRITE (0,*)  'PMC: Error in MPI_Comm_split '
-   CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
-ENDIF
-
 !
 !-- Get size and rank of the model running on this PE
...
 !
-!-- TO_DO: describe what is happening here, and why
+!-- Save the current model communicator for PMC internal use
 m_model_comm = comm
...
 IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
 !
-!--    Collect server PEs
-!--    TO_DO: explain in more details, what is done here
+!--    Collect server PEs.
+!--    Every model exept the root model has a parent model which acts as
+!--    server model. Create an intercommunicator to connect current PE to
+!--    all client PEs
       tag = 500 + i
       CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),  &
...
 ELSEIF ( i == m_my_cpl_id)  THEN
 !
-!--    Collect client PEs
-!--    TO_DO: explain in more detail, what is happening here
+!--    Collect client PEs.
+!--    Every model exept the root model has a paremt model which acts as
+!--    server model. Create an intercommunicator to connect current PE to
+!--    all server PEs
       tag = 500 + i
       CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,               &
                                  start_pe(m_couplers(i)%parent_id),     &
                                  tag, m_to_server_comm, istat )
 ENDIF
-
-IF ( istat /= MPI_SUCCESS )  THEN
-!
-!-- TO_DO: replace by message-call
-!-- TO_DO: can this really happen, or is this just for debugging?
-   IF ( m_world_rank == 0 )  WRITE (0,*)  'PMC: Error in Coupler Setup '
-   CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
-ENDIF
...
    ENDIF
 ENDDO
-!-- TO_DO: explain why this is done
-pmc_server_for_client(clientcount+1) = -1
-
 !
 !-- Get the size of the server model
-!-- TO_DO: what does "size" mean here? Number of PEs?
 IF ( m_my_cpl_id > 1 )  THEN
    CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,   &
...
 !
 !-- Make module private variables available to palm
-!-- TO_DO: why can't they be available from the beginning, i.e. why do they
-!-- first have to be declared as different private variables?
 SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name, &
-                                     npe_x, npe_y, lower_left_x,        &
-                                     lower_left_y )
+                                     npe_total, lower_left_x, lower_left_y )

    USE kinds
...
    INTEGER, INTENT(OUT), OPTIONAL  :: my_cpl_id
    INTEGER, INTENT(OUT), OPTIONAL  :: my_cpl_parent_id
-   INTEGER, INTENT(OUT), OPTIONAL  :: npe_x
-   INTEGER, INTENT(OUT), OPTIONAL  :: npe_y
+   INTEGER, INTENT(OUT), OPTIONAL  :: npe_total
    REAL(wp), INTENT(OUT), OPTIONAL :: lower_left_x
    REAL(wp), INTENT(OUT), OPTIONAL :: lower_left_y

-!-- TO_DO: is the PRESENT clause really required here?
    IF ( PRESENT( my_cpl_id )        )  my_cpl_id        = m_my_cpl_id
    IF ( PRESENT( my_cpl_parent_id ) )  my_cpl_parent_id = m_couplers(my_cpl_id)%parent_id
    IF ( PRESENT( cpl_name )         )  cpl_name         = m_couplers(my_cpl_id)%name
-   IF ( PRESENT( npe_x )            )  npe_x            = m_couplers(my_cpl_id)%npe_x
-   IF ( PRESENT( npe_y )            )  npe_y            = m_couplers(my_cpl_id)%npe_y
+   IF ( PRESENT( npe_total )        )  npe_total        = m_couplers(my_cpl_id)%npe_total
    IF ( PRESENT( lower_left_x )     )  lower_left_x     = m_couplers(my_cpl_id)%lower_left_x
    IF ( PRESENT( lower_left_y )     )  lower_left_y     = m_couplers(my_cpl_id)%lower_left_y
...
-!-- TO_DO: what does this comment mean?
-! Private SUBROUTINEs
 SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )
...
 IF ( m_couplers(i)%id /= -1  .AND.  i <= pmc_max_modell )  THEN
-   WRITE ( 0, '(A,A,1X,4I7,1X,2F10.2)' )  'Set up Model  ',             &
-           TRIM( m_couplers(i)%name ), m_couplers(i)%id,                &
-           m_couplers(i)%Parent_id, m_couplers(i)%npe_x,                &
-           m_couplers(i)%npe_y, m_couplers(i)%lower_left_x,             &
-           m_couplers(i)%lower_left_y
+   WRITE ( 0, '(A,A,1X,3I7,1X,2F10.2)' )  'Set up Model  ',             &
+           TRIM( m_couplers(i)%name ), m_couplers(i)%id,                &
+           m_couplers(i)%Parent_id, m_couplers(i)%npe_total,            &
+           m_couplers(i)%lower_left_x,                                  &
+           m_couplers(i)%lower_left_y
 ELSE
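The split-by-coupler-id step above, reduced to a compile-ready toy (illustrative; the color assignment is an assumption for demonstration, not the nestpar layout logic):

program split_demo
!
!-- Minimal sketch (not PMC code): give each rank a model id and split
!-- MPI_COMM_WORLD into one communicator per model, as pmc_init_model
!-- does with m_my_cpl_id as the split color.
   use mpi
   implicit none
   integer :: istat, world_rank, my_cpl_id, comm, model_rank

   call mpi_init( istat )
   call mpi_comm_rank( mpi_comm_world, world_rank, istat )

   my_cpl_id = merge( 1, 2, world_rank < 2 )   ! assumption: first 2 PEs form the root model

   call mpi_comm_split( mpi_comm_world, my_cpl_id, world_rank, comm, istat )
   call mpi_comm_rank( comm, model_rank, istat )

   print '(3(A,I0))', 'world rank ', world_rank, ' -> model ', my_cpl_id, &
                      ', model rank ', model_rank
   call mpi_finalize( istat )
end program split_demo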
palm/trunk/SOURCE/pmc_interface.f90
(r1767 → r1779)

 ! Current revisions:
 ! ------------------
-!
+! only the total number of PEs is given for the domains, npe_x and npe_y
+! replaced by npe_total,
+! array management changed from linked list to sequential loop
 !
 ! Former revisions:
...
 USE control_parameters,                                                 &
-    ONLY:  dt_3d, dz, humidity, message_string, nest_bound_l,           &
-           nest_bound_r, nest_bound_s, nest_bound_n, passive_scalar,    &
-           simulated_time, topography, volume_flow
+    ONLY:  coupling_char, dt_3d, dz, humidity, message_string,          &
+           nest_bound_l, nest_bound_r, nest_bound_s, nest_bound_n,      &
+           nest_domain, passive_scalar, simulated_time, topography,     &
+           volume_flow

 USE cpulog,                                                             &
...
 USE pmc_client,                                                         &
-    ONLY:  pmc_clientinit, pmc_c_getnextarray, pmc_c_get_2d_index_list, &
-           pmc_c_getbuffer, pmc_c_putbuffer, pmc_c_setind_and_allocmem, &
+    ONLY:  pmc_clientinit, pmc_c_clear_next_array_list,                 &
+           pmc_c_getnextarray, pmc_c_get_2d_index_list, pmc_c_getbuffer,&
+           pmc_c_putbuffer, pmc_c_setind_and_allocmem,                  &
            pmc_c_set_dataarray, pmc_set_dataarray_name
...
 USE pmc_server,                                                         &
-    ONLY:  pmc_serverinit, pmc_s_fillbuffer, pmc_s_getdata_from_buffer, &
-           pmc_s_getnextarray, pmc_s_setind_and_allocmem,               &
-           pmc_s_set_active_data_array, pmc_s_set_dataarray,            &
-           pmc_s_set_2d_index_list
+    ONLY:  pmc_serverinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer,&
+           pmc_s_getdata_from_buffer, pmc_s_getnextarray,               &
+           pmc_s_setind_and_allocmem, pmc_s_set_active_data_array,      &
+           pmc_s_set_dataarray, pmc_s_set_2d_index_list

 #endif
...
 !-- limit. Try to reduce as much as possible

-!-- TO_DO: shouldn't we use public as default here? Only a minority of the
-!-- variables is private.
+!-- TO_DO: are all of these variables following now really PUBLIC?
+!-- Klaus and I guess they are not
 PRIVATE    !:  Note that the default publicity is here set to private.
...
 INTEGER(iwp), PUBLIC, SAVE ::  cpl_id = 1       !:
 CHARACTER(LEN=32), PUBLIC, SAVE ::  cpl_name    !:
-INTEGER(iwp), PUBLIC, SAVE ::  cpl_npex         !:
-INTEGER(iwp), PUBLIC, SAVE ::  cpl_npey         !:
+INTEGER(iwp), PUBLIC, SAVE ::  cpl_npe_total    !:
 INTEGER(iwp), PUBLIC, SAVE ::  cpl_parent_id    !:
...
 !-- Module private variables.
 INTEGER(iwp), DIMENSION(3) ::  define_coarse_grid_int    !:
-REAL(wp), DIMENSION(9)     ::  define_coarse_grid_real   !:
+REAL(wp), DIMENSION(7)     ::  define_coarse_grid_real   !:

 TYPE coarsegrid_def
...
 !
 !-- This is not a nested run
-!
-!-- TO_DO: this wouldn't be required any more?
    world_comm = MPI_COMM_WORLD
    cpl_id     = 1
    cpl_name   = ""
-   cpl_npex   = 2
-   cpl_npey   = 2
-   lower_left_coord_x = 0.0_wp
-   lower_left_coord_y = 0.0_wp
+
    RETURN
-ELSE
-!
-!-- Set the general steering switch which tells PALM that its a nested run
-   nested_run = .TRUE.
+
 ENDIF

+!
+!-- Set the general steering switch which tells PALM that its a nested run
+nested_run = .TRUE.
+
+!
+!-- Get some variables required by the pmc-interface (and in some cases in the
+!-- PALM code out of the pmci) out of the pmc-core
 CALL pmc_get_local_model_info( my_cpl_id = cpl_id,                      &
                                my_cpl_parent_id = cpl_parent_id,        &
                                cpl_name = cpl_name,                     &
-                               npe_x = cpl_npex, npe_y = cpl_npey,      &
+                               npe_total = cpl_npe_total,               &
                                lower_left_x = lower_left_coord_x,       &
                                lower_left_y = lower_left_coord_y )
+!
+!-- Set the steering switch which tells the models that they are nested (of
+!-- course the root domain (cpl_id = 1 ) is not nested)
+IF ( cpl_id >= 2 )  THEN
+   nest_domain = .TRUE.
+   WRITE( coupling_char, '(A1,I2.2)')  '_', cpl_id
+ENDIF
+
 !
 !-- Message that communicators for nesting are initialized.
...
 define_coarse_grid_real(1) = lower_left_coord_x
 define_coarse_grid_real(2) = lower_left_coord_y
-!-- TO_DO: remove this?
-define_coarse_grid_real(3) = 0             ! KK currently not used.
-define_coarse_grid_real(4) = 0
-define_coarse_grid_real(5) = dx
-define_coarse_grid_real(6) = dy
-define_coarse_grid_real(7) = lower_left_coord_x + ( nx + 1 ) * dx
-define_coarse_grid_real(8) = lower_left_coord_y + ( ny + 1 ) * dy
-define_coarse_grid_real(9) = dz
+define_coarse_grid_real(3) = dx
+define_coarse_grid_real(4) = dy
+define_coarse_grid_real(5) = lower_left_coord_x + ( nx + 1 ) * dx
+define_coarse_grid_real(6) = lower_left_coord_y + ( ny + 1 ) * dy
+define_coarse_grid_real(7) = dz

 define_coarse_grid_int(1) = nx
...
 yez = ( nbgp + 1 ) * dy
 IF ( cl_coord_x(0) < define_coarse_grid_real(1) + xez )         nomatch = 1
-IF ( cl_coord_x(nx_cl + 1) > define_coarse_grid_real(7) - xez ) nomatch = 1
+IF ( cl_coord_x(nx_cl + 1) > define_coarse_grid_real(5) - xez ) nomatch = 1
 IF ( cl_coord_y(0) < define_coarse_grid_real(2) + yez )         nomatch = 1
-IF ( cl_coord_y(ny_cl + 1) > define_coarse_grid_real(8) - yez ) nomatch = 1
+IF ( cl_coord_y(ny_cl + 1) > define_coarse_grid_real(6) - yez ) nomatch = 1

 DEALLOCATE( cl_coord_x )
...
 !
 !-- Send coarse grid information to client
-CALL pmc_send_to_client( client_id, Define_coarse_grid_real, 9, 0,      &
+CALL pmc_send_to_client( client_id, Define_coarse_grid_real,            &
+                         SIZE(define_coarse_grid_real), 0,              &
                          21, ierr )
 CALL pmc_send_to_client( client_id, Define_coarse_grid_int, 3, 0,       &
...
 !
 !-- Include couple arrays into server content
+CALL pmc_s_clear_next_array_list
 DO  WHILE ( pmc_s_getnextarray( client_id, myname ) )
    CALL pmci_set_array_pointer( myname, client_id = client_id,          &
...
 IF ( .NOT. pmc_is_rootmodel() )  THEN
    CALL pmc_clientinit
-
+!
+!-- Here and only here the arrays are defined, which actualy will be
+!-- exchanged between client and server.
+!-- Please check, if the arrays are in the list of possible exchange arrays
+!-- in subroutines:
+!-- pmci_set_array_pointer (for server arrays)
+!-- pmci_create_client_arrays (for client arrays)
    CALL pmc_set_dataarray_name( 'coarse', 'u'  ,'fine', 'u',  ierr )
    CALL pmc_set_dataarray_name( 'coarse', 'v'  ,'fine', 'v',  ierr )
...
 !
 !-- Receive Coarse grid information.
-CALL pmc_recv_from_server( define_coarse_grid_real, 9, 0, 21, ierr )
+CALL pmc_recv_from_server( define_coarse_grid_real,                     &
+                           SIZE(define_coarse_grid_real), 0, 21, ierr )
 CALL pmc_recv_from_server( define_coarse_grid_int, 3, 0, 22, ierr )
...
 WRITE(0,*) 'startx_tot = ',define_coarse_grid_real(1)
 WRITE(0,*) 'starty_tot = ',define_coarse_grid_real(2)
-WRITE(0,*) 'endx_tot   = ',define_coarse_grid_real(7)
-WRITE(0,*) 'endy_tot   = ',define_coarse_grid_real(8)
-WRITE(0,*) 'dx         = ',define_coarse_grid_real(5)
-WRITE(0,*) 'dy         = ',define_coarse_grid_real(6)
-WRITE(0,*) 'dz         = ',define_coarse_grid_real(9)
+WRITE(0,*) 'endx_tot   = ',define_coarse_grid_real(5)
+WRITE(0,*) 'endy_tot   = ',define_coarse_grid_real(6)
+WRITE(0,*) 'dx         = ',define_coarse_grid_real(3)
+WRITE(0,*) 'dy         = ',define_coarse_grid_real(4)
+WRITE(0,*) 'dz         = ',define_coarse_grid_real(7)
 WRITE(0,*) 'nx_coarse  = ',define_coarse_grid_int(1)
 WRITE(0,*) 'ny_coarse  = ',define_coarse_grid_int(2)
...
-CALL MPI_BCAST( define_coarse_grid_real, 9, MPI_REAL, 0, comm2d, ierr )
+CALL MPI_BCAST( define_coarse_grid_real, SIZE(define_coarse_grid_real), &
+                MPI_REAL, 0, comm2d, ierr )
 CALL MPI_BCAST( define_coarse_grid_int, 3, MPI_INTEGER, 0, comm2d, ierr )

-cg%dx = define_coarse_grid_real(5)
-cg%dy = define_coarse_grid_real(6)
-cg%dz = define_coarse_grid_real(9)
+cg%dx = define_coarse_grid_real(3)
+cg%dy = define_coarse_grid_real(4)
+cg%dz = define_coarse_grid_real(7)
 cg%nx = define_coarse_grid_int(1)
 cg%ny = define_coarse_grid_int(2)
...
 !
 !-- Include couple arrays into client content.
+CALL pmc_c_clear_next_array_list
 DO  WHILE ( pmc_c_getnextarray( myname ) )
 !--    TO_DO: Klaus, why the c-arrays are still up to cg%nz??
...
    ENDIF
 ENDDO
-
-WRITE( 0, * )  'Coarse area ', myid, icl, icr, jcs, jcn

 coarse_bound(1) = icl
...
 !
 !-- List of array names, which can be coupled
+!-- In case of 3D please change also the second array for the pointer version
 IF ( TRIM(name) == "u" )  p_3d => u
 IF ( TRIM(name) == "v" )  p_3d => v
...
 IF ( TRIM(name) == "pt" )  p_3d => pt
 IF ( TRIM(name) == "q" )  p_3d => q
-!IF ( TRIM(name) == "z0" )  p_2d => z0
+!
+!-- This is just an example for a 2D array, not active for coupling
+!-- Please note, that z0 has to be declared as TARGET array in modules.f90
+!  IF ( TRIM(name) == "z0" )  p_2d => z0

 #if defined( __nopointer )
...
 IF ( TRIM(name) == "e" )   p_3d_sec => e_2
 IF ( TRIM(name) == "pt" )  p_3d_sec => pt_2
-!IF ( TRIM(name) == "z0" )  p_2d_sec => z0_2
+IF ( TRIM(name) == "q" )   p_3d_sec => q_2

 IF ( ASSOCIATED( p_3d ) )  THEN
    CALL pmc_s_set_dataarray( client_id, p_3d, nz_cl, nz,                &
                              array_2 = p_3d_sec )
 ELSEIF ( ASSOCIATED( p_2d ) )  THEN
-   CALL pmc_s_set_dataarray( client_id, p_2d, array_2 = p_2d_sec )
+   CALL pmc_s_set_dataarray( client_id, p_2d )
 ELSE
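Passing SIZE(define_coarse_grid_real) instead of the literal 9 keeps sender, receiver, and broadcast in step when the array layout changes, as it does in this changeset (9 down to 7 elements). A trivial sketch, illustrative only:

program size_demo
!
!-- Minimal sketch (not PMC code): deriving the transfer count from SIZE()
!-- means resizing the array cannot desynchronize the message lengths.
   implicit none
   real :: define_coarse_grid_real(7)

   define_coarse_grid_real = 0.0
   print '(A,I0)', 'elements to transfer: ', size( define_coarse_grid_real )
end program size_demo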
palm/trunk/SOURCE/pmc_mpi_wrapper.f90
r1765 r1779
20 20 ! Current revisions:
21 21 ! ------------------
22 !
22 ! kind=dp replaced by wp
23 23 !
24 24 ! Former revisions:
… …
53 53 SAVE
54 54
55 !-- TO_DO: what is the meaning of this? Could variables declared in this module
56 !-- also have single precision?
57 ! INTEGER, PARAMETER :: dp = wp
58
59
60 ! INTERFACE section
61
62 55 INTERFACE PMC_Send_to_Server
63 56 MODULE PROCEDURE PMC_Send_to_Server_INTEGER
… …
159 152
160 153 SUBROUTINE PMC_Send_to_Server_real_r1 (buf, n, Server_rank, tag, ierr)
161 IMPLICIT none
162 !-- TO_DO: has buf always to be of dp-kind, or can wp used here
163 !-- this effects all respective declarations in this file
164 REAL(kind=dp), DIMENSION(:), INTENT(IN) :: buf
165 INTEGER, INTENT(IN) :: n
166 INTEGER, INTENT(IN) :: Server_rank
167 INTEGER, INTENT(IN) :: tag
168 INTEGER, INTENT(OUT) :: ierr
169
170 ierr = 0
171 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, ierr)
154
155 IMPLICIT none
156
157 REAL(wp), DIMENSION(:), INTENT(IN) :: buf
158 INTEGER, INTENT(IN) :: n
159 INTEGER, INTENT(IN) :: Server_rank
160 INTEGER, INTENT(IN) :: tag
161 INTEGER, INTENT(OUT) :: ierr
162
163 ierr = 0
164 CALL MPI_Send (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, ierr)
172 165
173 166 return
… …
175 168
176 169 SUBROUTINE PMC_Recv_from_Server_real_r1 (buf, n, Server_rank, tag, ierr)
177 IMPLICIT none
178 REAL(kind=dp), DIMENSION(:), INTENT(OUT) :: buf
179 INTEGER, INTENT(IN) :: n
180 INTEGER, INTENT(IN) :: Server_rank
181 INTEGER, INTENT(IN) :: tag
182 INTEGER, INTENT(OUT) :: ierr
183
184 ierr = 0
185 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, &
186 MPI_STATUS_IGNORE, ierr)
170
171 IMPLICIT none
172
173 REAL(wp), DIMENSION(:), INTENT(OUT) :: buf
174 INTEGER, INTENT(IN) :: n
175 INTEGER, INTENT(IN) :: Server_rank
176 INTEGER, INTENT(IN) :: tag
177 INTEGER, INTENT(OUT) :: ierr
178
179 ierr = 0
180 CALL MPI_Recv (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, &
181 MPI_STATUS_IGNORE, ierr)
187 182
188 183 return
… …
190 185
191 186 SUBROUTINE PMC_Send_to_Server_real_r2 (buf, n, Server_rank, tag, ierr)
192 IMPLICIT none
193 REAL(kind=dp), DIMENSION(:,:), INTENT(IN) :: buf
194 INTEGER, INTENT(IN) :: n
195 INTEGER, INTENT(IN) :: Server_rank
196 INTEGER, INTENT(IN) :: tag
197 INTEGER, INTENT(OUT) :: ierr
198
199 ierr = 0
200 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, ierr)
187
188 IMPLICIT none
189
190 REAL(wp), DIMENSION(:,:), INTENT(IN) :: buf
191 INTEGER, INTENT(IN) :: n
192 INTEGER, INTENT(IN) :: Server_rank
193 INTEGER, INTENT(IN) :: tag
194 INTEGER, INTENT(OUT) :: ierr
195
196 ierr = 0
197 CALL MPI_Send (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, ierr)
201 198
202 199 return
… …
204 201
205 202 SUBROUTINE PMC_Recv_from_Server_real_r2 (buf, n, Server_rank, tag, ierr)
206 IMPLICIT none
207 REAL(kind=dp), DIMENSION(:,:),INTENT(OUT) :: buf
208 INTEGER, INTENT(IN) :: n
209 INTEGER, INTENT(IN) :: Server_rank
210 INTEGER, INTENT(IN) :: tag
211 INTEGER, INTENT(OUT) :: ierr
212
213 ierr = 0
214 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, &
203
204 IMPLICIT none
205
206 REAL(wp), DIMENSION(:,:), INTENT(OUT) :: buf
207 INTEGER, INTENT(IN) :: n
208 INTEGER, INTENT(IN) :: Server_rank
209 INTEGER, INTENT(IN) :: tag
210 INTEGER, INTENT(OUT) :: ierr
211
212 ierr = 0
213 CALL MPI_Recv (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, &
215 214 MPI_STATUS_IGNORE, ierr)
… …
219 218
220 219 SUBROUTINE PMC_Send_to_Server_real_r3 (buf, n, Server_rank, tag, ierr)
221 IMPLICIT none
222 REAL(kind=dp), DIMENSION(:,:,:), INTENT(IN) :: buf
223 INTEGER, INTENT(IN) :: n
224 INTEGER, INTENT(IN) :: Server_rank
225 INTEGER, INTENT(IN) :: tag
226 INTEGER, INTENT(OUT) :: ierr
227
228 ierr = 0
229 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, ierr)
220
221 IMPLICIT none
222
223 REAL(wp), DIMENSION(:,:,:), INTENT(IN) :: buf
224 INTEGER, INTENT(IN) :: n
225 INTEGER, INTENT(IN) :: Server_rank
226 INTEGER, INTENT(IN) :: tag
227 INTEGER, INTENT(OUT) :: ierr
228
229 ierr = 0
230 CALL MPI_Send (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, ierr)
230 231
231 232 return
… …
233 234
234 235 SUBROUTINE PMC_Recv_from_Server_real_r3 (buf, n, Server_rank, tag, ierr)
235 IMPLICIT none
236 REAL(kind=dp), DIMENSION(:,:,:),INTENT(OUT) :: buf
237 INTEGER, INTENT(IN) :: n
238 INTEGER, INTENT(IN) :: Server_rank
239 INTEGER, INTENT(IN) :: tag
240 INTEGER, INTENT(OUT) :: ierr
241
242 ierr = 0
243 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Server_rank, tag, m_to_server_comm, &
236
237 IMPLICIT none
238
239 REAL(wp), DIMENSION(:,:,:), INTENT(OUT) :: buf
240 INTEGER, INTENT(IN) :: n
241 INTEGER, INTENT(IN) :: Server_rank
242 INTEGER, INTENT(IN) :: tag
243 INTEGER, INTENT(OUT) :: ierr
244
245 ierr = 0
246 CALL MPI_Recv (buf, n, MPI_REAL, Server_rank, tag, m_to_server_comm, &
244 247 MPI_STATUS_IGNORE, ierr)
245 248
… …
296 299
297 300 SUBROUTINE PMC_Send_to_Client_real_r1 (Client_id, buf, n, Client_rank, tag, ierr)
298 IMPLICIT none
299 INTEGER, INTENT(IN) :: Client_id
300 REAL(kind=dp), DIMENSION(:), INTENT(IN) :: buf
301 INTEGER, INTENT(IN) :: n
302 INTEGER, INTENT(IN) :: Client_rank
303 INTEGER, INTENT(IN) :: tag
304 INTEGER, INTENT(OUT) :: ierr
305
306 ierr = 0
307 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
301
302 IMPLICIT none
303
304 INTEGER, INTENT(IN) :: Client_id
305 REAL(wp), DIMENSION(:), INTENT(IN) :: buf
306 INTEGER, INTENT(IN) :: n
307 INTEGER, INTENT(IN) :: Client_rank
308 INTEGER, INTENT(IN) :: tag
309 INTEGER, INTENT(OUT) :: ierr
310
311 ierr = 0
312 CALL MPI_Send (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
308 313 ierr)
… …
312 317
313 318 SUBROUTINE PMC_Recv_from_Client_real_r1 (Client_id, buf, n, Client_rank, tag, ierr)
314 IMPLICIT none
315 INTEGER, INTENT(IN) :: Client_id
316 REAL(kind=dp), DIMENSION(:), INTENT(INOUT):: buf
317 INTEGER, INTENT(IN) :: n
318 INTEGER, INTENT(IN) :: Client_rank
319 INTEGER, INTENT(IN) :: tag
320 INTEGER, INTENT(OUT) :: ierr
321
322 ierr = 0
323 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
319
320 IMPLICIT none
321
322 INTEGER, INTENT(IN) :: Client_id
323 REAL(wp), DIMENSION(:), INTENT(INOUT) :: buf
324 INTEGER, INTENT(IN) :: n
325 INTEGER, INTENT(IN) :: Client_rank
326 INTEGER, INTENT(IN) :: tag
327 INTEGER, INTENT(OUT) :: ierr
328
329 ierr = 0
330 CALL MPI_Recv (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
324 331 MPI_STATUS_IGNORE, ierr)
… …
328 335
329 336 SUBROUTINE PMC_Send_to_Client_real_r2 (Client_id, buf, n, Client_rank, tag, ierr)
330 IMPLICIT none
331 INTEGER, INTENT(IN) :: Client_id
332 REAL(kind=dp), DIMENSION(:,:), INTENT(IN) :: buf
333 INTEGER, INTENT(IN) :: n
334 INTEGER, INTENT(IN) :: Client_rank
335 INTEGER, INTENT(IN) :: tag
336 INTEGER, INTENT(OUT) :: ierr
337
338 ierr = 0
339 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
337
338 IMPLICIT none
339
340 INTEGER, INTENT(IN) :: Client_id
341 REAL(wp), DIMENSION(:,:), INTENT(IN) :: buf
342 INTEGER, INTENT(IN) :: n
343 INTEGER, INTENT(IN) :: Client_rank
344 INTEGER, INTENT(IN) :: tag
345 INTEGER, INTENT(OUT) :: ierr
346
347 ierr = 0
348 CALL MPI_Send (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
340 349 ierr)
… …
344 353
345 354 SUBROUTINE PMC_Recv_from_Client_real_r2 (Client_id, buf, n, Client_rank, tag, ierr)
346 IMPLICIT none
347 INTEGER, INTENT(IN) :: Client_id
348 REAL(kind=dp), DIMENSION(:,:), INTENT(OUT):: buf
349 INTEGER, INTENT(IN) :: n
350 INTEGER, INTENT(IN) :: Client_rank
351 INTEGER, INTENT(IN) :: tag
352 INTEGER, INTENT(OUT) :: ierr
353
354 ierr = 0
355 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
355
356 IMPLICIT none
357
358 INTEGER, INTENT(IN) :: Client_id
359 REAL(wp), DIMENSION(:,:), INTENT(OUT) :: buf
360 INTEGER, INTENT(IN) :: n
361 INTEGER, INTENT(IN) :: Client_rank
362 INTEGER, INTENT(IN) :: tag
363 INTEGER, INTENT(OUT) :: ierr
364
365 ierr = 0
366 CALL MPI_Recv (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
356 367 MPI_STATUS_IGNORE, ierr)
… …
360 371
361 372 SUBROUTINE PMC_Send_to_Client_real_r3 (Client_id, buf, n, Client_rank, tag, ierr)
362 IMPLICIT none
363 INTEGER, INTENT(IN) :: Client_id
364 REAL(kind=dp), DIMENSION(:,:,:), INTENT(IN) :: buf
365 INTEGER, INTENT(IN) :: n
366 INTEGER, INTENT(IN) :: Client_rank
367 INTEGER, INTENT(IN) :: tag
368 INTEGER, INTENT(OUT) :: ierr
369
370 ierr = 0
371 CALL MPI_Send (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
373
374 IMPLICIT none
375
376 INTEGER, INTENT(IN) :: Client_id
377 REAL(wp), DIMENSION(:,:,:), INTENT(IN) :: buf
378 INTEGER, INTENT(IN) :: n
379 INTEGER, INTENT(IN) :: Client_rank
380 INTEGER, INTENT(IN) :: tag
381 INTEGER, INTENT(OUT) :: ierr
382
383 ierr = 0
384 CALL MPI_Send (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
372 385 ierr)
… …
376 389
377 390 SUBROUTINE PMC_Recv_from_Client_real_r3 (Client_id, buf, n, Client_rank, tag, ierr)
378 IMPLICIT none
379 INTEGER, INTENT(IN) :: Client_id
380 REAL(kind=dp), DIMENSION(:,:,:), INTENT(OUT):: buf
381 INTEGER, INTENT(IN) :: n
382 INTEGER, INTENT(IN) :: Client_rank
383 INTEGER, INTENT(IN) :: tag
384 INTEGER, INTENT(OUT) :: ierr
385
386 ierr = 0
387 CALL MPI_Recv (buf, n, MPI_DOUBLE_PRECISION, Client_rank, tag, m_to_client_comm(Client_id), &
391
392 IMPLICIT none
393
394 INTEGER, INTENT(IN) :: Client_id
395 REAL(wp), DIMENSION(:,:,:), INTENT(OUT) :: buf
396 INTEGER, INTENT(IN) :: n
397 INTEGER, INTENT(IN) :: Client_rank
398 INTEGER, INTENT(IN) :: tag
399 INTEGER, INTENT(OUT) :: ierr
400
401 ierr = 0
402 CALL MPI_Recv (buf, n, MPI_REAL, Client_rank, tag, m_to_client_comm(Client_id), &
388 403 MPI_STATUS_IGNORE, ierr)
389 404
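Editorial note on the change above: it pairs REAL(wp) buffers with the MPI_REAL datatype. MPI_REAL describes a default-kind Fortran REAL, so this pairing is only type-correct when wp is the default real kind or when default reals are promoted at build time. A minimal, kind-robust sketch follows; the module and function below are hypothetical illustrations and not part of this changeset:

MODULE pmc_mpi_datatype
!-- Hypothetical helper, not part of r1779: pick the MPI datatype that
!-- matches REAL(wp), so send/receive calls stay correct if wp changes
   USE MPI
   IMPLICIT NONE
   INTEGER, PARAMETER :: wp = KIND( 1.0D0 )   ! assumption: double working precision

CONTAINS

   INTEGER FUNCTION pmc_mpi_real ()
!--   Compare wp against the intrinsic default kinds instead of hard-coding
      IF ( wp == KIND( 1.0D0 ) )  THEN
         pmc_mpi_real = MPI_DOUBLE_PRECISION
      ELSE
         pmc_mpi_real = MPI_REAL
      ENDIF
   END FUNCTION pmc_mpi_real

END MODULE pmc_mpi_datatype

With such a helper a call would read CALL MPI_Send (buf, n, pmc_mpi_real(), Server_rank, tag, m_to_server_comm, ierr); the changeset instead fixes the datatype to MPI_REAL and relies on wp matching it.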
-
palm/trunk/SOURCE/pmc_server.f90
r1767 r1779
20 20 ! Current revisions:
21 21 ! ------------------
22 !
22 ! kind=dp replaced by wp,
23 ! error messages removed or changed to PALM style, dim_order removed
24 ! array management changed from linked list to sequential loop
23 25 !
24 26 ! Former revisions:
… …
52 54 USE kinds
53 55 USE PMC_general, ONLY: ClientDef, PMC_MAX_MODELL,PMC_sort, DA_NameDef, DA_Desclen, DA_Namelen, &
54 PMC_G_SetName, PMC_G_GetName, PeDef, ArrayDef
56 PMC_G_SetName, PeDef, ArrayDef, PMC_MAX_ARRAY
55 57 USE PMC_handle_communicator, ONLY: m_model_comm,m_model_rank,m_model_npes, m_to_client_comm, &
56 58 PMC_Server_for_Client, m_world_rank
… …
70 72 TYPE(ClientIndexDef),DIMENSION(PMC_MAX_MODELL) :: indClients
71 73
74 INTEGER :: next_array_in_list = 0
75
72 76 PUBLIC PMC_Server_for_Client
73
74 !-- TO_DO: what is the meaning of this? Could variables declared in this module
75 !-- also have single precision?
76 ! INTEGER, PARAMETER :: dp = wp
77
78 ! INTERFACE section
79
77
80 78 INTERFACE PMC_ServerInit
… …
85 83 MODULE procedure PMC_S_Set_2D_index_list
86 84 END INTERFACE PMC_S_Set_2D_index_list
85
86 INTERFACE PMC_S_clear_next_array_list
87 MODULE procedure PMC_S_clear_next_array_list
88 END INTERFACE PMC_S_clear_next_array_list
87 89
88 90 INTERFACE PMC_S_GetNextArray
… …
115 117 PUBLIC PMC_ServerInit, PMC_S_Set_2D_index_list, PMC_S_GetNextArray, PMC_S_Set_DataArray
116 118 PUBLIC PMC_S_setInd_and_AllocMem, PMC_S_FillBuffer, PMC_S_GetData_from_Buffer, PMC_S_Set_Active_data_array
119 PUBLIC PMC_S_clear_next_array_list
117 120
118 121 CONTAINS
… …
145 148
146 149 ALLOCATE (Clients(ClientId)%PEs(Clients(ClientId)%inter_npes))
147
148 do j=1,Clients(ClientId)%inter_npes ! Loop over all client PEs
149 NULLIFY(Clients(ClientId)%PEs(j)%Arrays)
150 !
151 !-- Allocate for all client PEs an array of TYPE ArrayDef to store information of transfer array
152 do j=1,Clients(ClientId)%inter_npes
153 Allocate(Clients(ClientId)%PEs(j)%array_list(PMC_MAX_ARRAY))
150 154 end do
151 155
… …
219 223 END SUBROUTINE PMC_S_Set_2D_index_list
220 224
221 logical function PMC_S_GetNextArray (ClientId, myName,Client_PeIndex)
225 SUBROUTINE PMC_S_clear_next_array_list
226 IMPLICIT none
227
228 next_array_in_list = 0
229
230 return
231 END SUBROUTINE PMC_S_clear_next_array_list
232
233 ! List handling is still required to get minimal interaction with pmc_interface
234 logical function PMC_S_GetNextArray (ClientId, myName)
235 INTEGER(iwp),INTENT(IN) :: ClientId
236 CHARACTER(len=*),INTENT(OUT) :: myName
237
238 !-- local variables
239 TYPE(PeDef),POINTER :: aPE
240 TYPE(ArrayDef),POINTER :: ar
241
242 next_array_in_list = next_array_in_list+1
243
244 !-- Array Names are the same on all client PE, so take first PE to get the name
245 aPE => Clients(ClientId)%PEs(1)
246
247 if(next_array_in_list > aPE%Nr_arrays) then
248 PMC_S_GetNextArray = .false. ! all arrays done
249 return
250 end if
251
252 ar => aPE%array_list(next_array_in_list)
253 myName = ar%name
254
255 PMC_S_GetNextArray = .true. ! Return true if legal array
256 return
257 END function PMC_S_GetNextArray
258
259 SUBROUTINE PMC_S_Set_DataArray_2d (ClientId, array, array_2 )
260
261 IMPLICIT none
262
222 263 INTEGER,INTENT(IN) :: ClientId
223 CHARACTER(len=*),INTENT(OUT) :: myName
224
225 !-- local variables
226 INTEGER :: MyCoupleIndex
227 logical :: MyLast
228 CHARACTER(len=DA_Namelen) :: loName
229 INTEGER,INTENT(IN),optional :: Client_PeIndex
230
231 loName = ' '
232
233 CALL PMC_G_GetName (clients(ClientId), MyCoupleIndex, loName, MyLast, Client_PeIndex)
234
235 myName = loName
236
237 PMC_S_GetNextArray = .NOT. MyLast ! Return true if valid array
238
239 return
240 END function PMC_S_GetNextArray
241
242 SUBROUTINE PMC_S_Set_DataArray_2d (ClientId, array, array_2 )
243 IMPLICIT none
244 INTEGER,INTENT(IN) :: ClientId
245 !-- TO_DO: has array always to be of dp-kind, or can wp used here
246 !-- this effects all respective declarations in this file
247 REAL(kind=dp),INTENT(IN),DIMENSION(:,:) :: array
248 REAL(kind=dp),INTENT(IN),DIMENSION(:,:),OPTIONAL :: array_2
249 !-- local variables
264 REAL(wp), INTENT(IN), DIMENSION(:,:) :: array
265 REAL(wp), INTENT(IN), DIMENSION(:,:), OPTIONAL :: array_2
266
250 267 INTEGER :: NrDims
251 268 INTEGER,DIMENSION (4) :: dims
252 INTEGER :: dim_order
253 269 TYPE(c_ptr) :: array_adr
254 270 TYPE(c_ptr) :: second_adr
… …
259 275 dims(1) = size(array,1)
260 276 dims(2) = size(array,2)
261 dim_order = 2
262
263 277 array_adr = c_loc(array)
264 278
265 279 IF ( PRESENT( array_2 ) ) THEN
266 280 second_adr = c_loc(array_2)
267 CALL PMC_S_SetArray (ClientId, NrDims, dims, dim_order, array_adr, second_adr = second_adr)
281 CALL PMC_S_SetArray (ClientId, NrDims, dims, array_adr, second_adr = second_adr)
268 282 ELSE
269 CALL PMC_S_SetArray (ClientId, NrDims, dims, dim_order, array_adr)
283 CALL PMC_S_SetArray (ClientId, NrDims, dims, array_adr)
270 284 ENDIF
271 285
… …
274 288
275 289 SUBROUTINE PMC_S_Set_DataArray_3d (ClientId, array, nz_cl, nz, array_2 )
290
276 291 IMPLICIT none
292
277 293 INTEGER,INTENT(IN) :: ClientId
278 REAL(kind=dp),INTENT(IN),DIMENSION(:,:,:) :: array
279 REAL(kind=dp),INTENT(IN),DIMENSION(:,:,:),OPTIONAL :: array_2
294 REAL(wp), INTENT(IN), DIMENSION(:,:,:) :: array
295 REAL(wp), INTENT(IN), DIMENSION(:,:,:), OPTIONAL :: array_2
280 296 INTEGER,INTENT(IN) :: nz_cl
281 297 INTEGER,INTENT(IN) :: nz
282 !-- local variables
298
283 299 INTEGER :: NrDims
284 300 INTEGER,DIMENSION (4) :: dims
285 INTEGER :: dim_order
286 301 TYPE(c_ptr) :: array_adr
287 302 TYPE(c_ptr) :: second_adr
… …
294 309 dims(2) = size(array,2)
295 310 dims(3) = size(array,3)
296 dim_order = 33
297 311 dims(4) = nz_cl+dims(1)-nz ! works for first dimension 1:nz and 0:nz+1
298 312
… …
304 318 IF ( PRESENT( array_2 ) ) THEN
305 319 second_adr = c_loc(array_2)
306 CALL PMC_S_SetArray (ClientId, NrDims, dims, dim_order, array_adr, second_adr = second_adr)
320 CALL PMC_S_SetArray (ClientId, NrDims, dims, array_adr, second_adr = second_adr)
307 321 ELSE
308 CALL PMC_S_SetArray (ClientId, NrDims, dims, dim_order, array_adr)
322 CALL PMC_S_SetArray (ClientId, NrDims, dims, array_adr)
309 323 ENDIF
310 324
… …
313 327
314 328 SUBROUTINE PMC_S_setInd_and_AllocMem (ClientId)
329
330 USE control_parameters, &
331 ONLY: message_string
332
315 333 IMPLICIT none
334
316 335 INTEGER,INTENT(IN) :: ClientId
317 336
318 INTEGER :: i, istat, ierr
337 INTEGER :: i, istat, ierr, j
319 338 INTEGER :: arlen, myIndex, tag
320 339 INTEGER :: rCount ! count MPI requests
… …
337 356 aPE => Clients(ClientId)%PEs(i)
338 357 tag = 200
339 do while (PMC_S_GetNextArray ( ClientId, myName,i))
340 ar => aPE%Arrays
341 if(ar%dim_order == 2) then
358 do j=1,aPE%Nr_arrays
359 ar => aPE%array_list(j)
360 if(ar%NrDims == 2) then
342 361 arlen = aPE%NrEle; ! 2D
343 else if(ar%dim_order == 33) then
362 else if(ar%NrDims == 3) then
344 363 arlen = aPE%NrEle * ar%A_dim(4); ! PALM 3D
345 364 else
… …
382 401 do i=1,Clients(ClientId)%inter_npes
383 402 aPE => Clients(ClientId)%PEs(i)
384 do while (PMC_S_GetNextArray ( ClientId, myName,i))
385 ar => aPE%Arrays
403 do j=1,aPE%Nr_arrays
404 ar => aPE%array_list(j)
386 405 !-- TO_DO: Adressrechnung ueberlegen?
387 406 ar%SendBuf = c_loc(base_array(ar%BufIndex)) !kk Adressrechnung ueberlegen
388 407 if(ar%BufIndex+ar%BufSize > bufsize) then
389 408 !-- TO_DO: can this error really happen, and what can be the reason?
390 write(0,'(a,i4,4i7,1x,a)') 'Buffer too small ',i,ar%BufIndex,ar%BufSize,ar%BufIndex+ar%BufSize,bufsize,trim(myName)
409 write(0,'(a,i4,4i7,1x,a)') 'Buffer too small ',i,ar%BufIndex,ar%BufSize,ar%BufIndex+ar%BufSize,bufsize,trim(ar%name)
391 410 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr)
392 411 end if
… …
399 418 SUBROUTINE PMC_S_FillBuffer (ClientId, WaitTime)
400 419 IMPLICIT none
401 INTEGER,INTENT(IN) :: ClientId
402 REAL(kind=dp),INTENT(OUT),optional :: WaitTime
403
404 !-- local variables
405 INTEGER :: ip,ij,istat,ierr
406 INTEGER :: myIndex
407 REAL(kind=dp) :: t1,t2
408 TYPE(PeDef),POINTER :: aPE
409 TYPE(ArrayDef),POINTER :: ar
410 CHARACTER(len=DA_Namelen) :: myName
411 INTEGER,DIMENSION(1) :: buf_shape
412 REAL(kind=wp),POINTER,DIMENSION(:) :: buf
413 REAL(kind=wp),POINTER,DIMENSION(:,:) :: data_2d
414 REAL(kind=wp),POINTER,DIMENSION(:,:,:) :: data_3d
420 INTEGER,INTENT(IN) :: ClientId
421 REAL(wp), INTENT(OUT), OPTIONAL :: WaitTime
422
423 INTEGER :: ip,ij,istat,ierr,j
424 INTEGER :: myIndex
425 REAL(wp) :: t1,t2
426 TYPE(PeDef),POINTER :: aPE
427 TYPE(ArrayDef),POINTER :: ar
428 CHARACTER(len=DA_Namelen) :: myName
429 INTEGER,DIMENSION(1) :: buf_shape
430 REAL(wp), POINTER, DIMENSION(:) :: buf
431 REAL(wp), POINTER, DIMENSION(:,:) :: data_2d
432 REAL(wp), POINTER, DIMENSION(:,:,:) :: data_3d
415 433
416 434 t1 = PMC_Time()
… …
421 439 do ip=1,Clients(ClientId)%inter_npes
422 440 aPE => Clients(ClientId)%PEs(ip)
423 do while (PMC_S_GetNextArray ( ClientId, myName,ip))
424 ar => aPE%Arrays
441 do j=1,aPE%Nr_arrays
442 ar => aPE%array_list(j)
425 443 myIndex=1
426 if(ar%dim_order == 2) then
444 if(ar%NrDims == 2) then
427 445 buf_shape(1) = aPE%NrEle
428 446 CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
… …
432 450 myIndex = myIndex+1
433 451 end do
434 else if(ar%dim_order == 33) then
452 else if(ar%NrDims == 3) then
435 453 buf_shape(1) = aPE%NrEle*ar%A_dim(4)
436 454 CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
… …
440 458 myIndex = myIndex+ar%A_dim(4)
441 459 end do
442 else
443 !-- TO_DO: can this error really happen, and what can be the reason?
444 write(0,*) "Illegal Order of Dimension ",ar%dim_order
445 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr);
446
447 460 end if
448 461 end do
449 462 end do
450 463
451 464 CALL MPI_Barrier(Clients(ClientId)%intra_comm, ierr) ! buffer is full
452 465
453 466 return
… …
455 468
456 469 SUBROUTINE PMC_S_GetData_from_Buffer (ClientId, WaitTime)
470
457 471 IMPLICIT none
472
458 INTEGER,INTENT(IN) :: ClientId
459 REAL(kind=dp),INTENT(OUT),optional :: WaitTime
473 INTEGER,INTENT(IN) :: ClientId
474 REAL(wp), INTENT(OUT), OPTIONAL :: WaitTime
460 475
461 476 !-- local variables
462 INTEGER :: ip,ij,istat,ierr
463 INTEGER :: myIndex
464 REAL(kind=dp) :: t1,t2
465 TYPE(PeDef),POINTER :: aPE
466 TYPE(ArrayDef),POINTER :: ar
467 CHARACTER(len=DA_Namelen) :: myName
468 INTEGER,DIMENSION(1) :: buf_shape
469 REAL(kind=wp),POINTER,DIMENSION(:) :: buf
470 REAL(kind=wp),POINTER,DIMENSION(:,:) :: data_2d
471 REAL(kind=wp),POINTER,DIMENSION(:,:,:) :: data_3d
477 INTEGER :: ip,ij,istat,ierr,j
478 INTEGER :: myIndex
479 REAL(wp) :: t1,t2
480 TYPE(PeDef),POINTER :: aPE
481 TYPE(ArrayDef),POINTER :: ar
482 CHARACTER(len=DA_Namelen) :: myName
483 INTEGER,DIMENSION(1) :: buf_shape
484 REAL(wp), POINTER, DIMENSION(:) :: buf
485 REAL(wp), POINTER, DIMENSION(:,:) :: data_2d
486 REAL(wp), POINTER, DIMENSION(:,:,:) :: data_3d
472 487
473 488 t1 = PMC_Time()
… …
478 493 do ip=1,Clients(ClientId)%inter_npes
479 494 aPE => Clients(ClientId)%PEs(ip)
480 do while (PMC_S_GetNextArray ( ClientId, myName,ip))
481 ar => aPE%Arrays
495 do j=1,aPE%Nr_arrays
496 ar => aPE%array_list(j)
482 497 myIndex=1
483 if(ar%dim_order == 2) then
498 if(ar%NrDims == 2) then
484 499 buf_shape(1) = aPE%NrEle
485 500 CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
… …
489 504 myIndex = myIndex+1
490 505 end do
491 else if(ar%dim_order == 33) then
506 else if(ar%NrDims == 3) then
492 507 buf_shape(1) = aPE%NrEle*ar%A_dim(4)
493 508 CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
… …
497 512 myIndex = myIndex+ar%A_dim(4)
498 513 end do
499 else
500 !-- TO_DO: can this error really happen, and what can be the reason?
501 write(0,*) "Illegal Order of Dimension ",ar%dim_order
502 CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr);
503
504 514 end if
505 515 end do
… …
535 545 END SUBROUTINE Get_DA_names_from_client
536 546
537 SUBROUTINE PMC_S_SetArray (ClientId, NrDims, dims, dim_order, array_adr, second_adr)
547 SUBROUTINE PMC_S_SetArray (ClientId, NrDims, dims, array_adr, second_adr)
538 548 IMPLICIT none
539 549
… …
541 551 INTEGER,INTENT(IN) :: NrDims
542 552 INTEGER,INTENT(IN),DIMENSION(:) :: dims
543 INTEGER,INTENT(IN) :: dim_order
544 553 TYPE(c_ptr),INTENT(IN) :: array_adr
545 554 TYPE(c_ptr),INTENT(IN),OPTIONAL :: second_adr
… …
554 563 do i=1,Clients(ClientId)%inter_npes
555 564 aPE => Clients(ClientId)%PEs(i)
556 ar => aPE%Arrays
565 ar => aPE%array_list(next_array_in_list)
557 566 ar%NrDims = NrDims
558 567 ar%A_dim = dims
559 ar%dim_order = dim_order
560 568 ar%data = array_adr
561 569 if(present(second_adr)) then
… …
579 587
580 588 !-- local variables
581 INTEGER :: i, ip
589 INTEGER :: i, ip, j
582 590 TYPE(PeDef),POINTER :: aPE
583 591 TYPE(ArrayDef),POINTER :: ar
… …
586 594 do ip=1,Clients(ClientId)%inter_npes
587 595 aPE => Clients(ClientId)%PEs(ip)
588 do while (PMC_S_GetNextArray ( ClientId, myName,ip))
589 ar => aPE%Arrays
596 do j=1,aPE%Nr_arrays
597 ar => aPE%array_list(j)
590 598 if(iactive == 1 .OR. iactive == 2) then
591 599 ar%data = ar%po_data(iactive)
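Editorial note on the array management change above: the former linked list of ArrayDef entries per client PE (NULLIFY(...%Arrays), traversal via PMC_G_GetName) is replaced by a pre-allocated array_list(PMC_MAX_ARRAY) and a module-level cursor next_array_in_list; PMC_S_clear_next_array_list resets the cursor and PMC_S_GetNextArray advances it. A condensed, self-contained sketch of that pattern with simplified types (the bound of 32 and the demo names are assumptions, not from the changeset):

PROGRAM array_list_sketch
!-- Condensed sketch of the r1779 pattern: a fixed array of descriptors
!-- plus a cursor, replacing the former linked-list traversal
   IMPLICIT NONE

   INTEGER, PARAMETER :: pmc_max_array = 32      ! assumption: illustrative bound
   TYPE arraydef
      CHARACTER(LEN=24) :: name
   END TYPE arraydef

   TYPE(arraydef), DIMENSION(pmc_max_array) :: array_list
   INTEGER :: nr_arrays          = 0
   INTEGER :: next_array_in_list = 0
   CHARACTER(LEN=24) :: myname

!-- Registration fills consecutive slots (as PMC_S_SetArray does via
!-- next_array_in_list)
   nr_arrays = nr_arrays + 1;  array_list(nr_arrays)%name = 'u'
   nr_arrays = nr_arrays + 1;  array_list(nr_arrays)%name = 'v'

!-- Every sweep must reset the cursor first (PMC_S_clear_next_array_list)
   next_array_in_list = 0
   DO WHILE ( get_next_array( myname ) )
      PRINT *, 'array in list: ', TRIM( myname )
   ENDDO

CONTAINS

   LOGICAL FUNCTION get_next_array( myname )
!--   Counterpart of PMC_S_GetNextArray: advance the cursor and report
!--   whether a further array exists
      CHARACTER(LEN=*), INTENT(OUT) :: myname

      next_array_in_list = next_array_in_list + 1
      IF ( next_array_in_list > nr_arrays )  THEN
         get_next_array = .FALSE.               ! all arrays done
         RETURN
      ENDIF
      myname = array_list(next_array_in_list)%name
      get_next_array = .TRUE.
   END FUNCTION get_next_array

END PROGRAM array_list_sketch

This keeps the external do-while iteration in pmc_interface unchanged while removing pointer-chain traversal; the price is the fixed PMC_MAX_ARRAY bound and the requirement to reset the cursor before every sweep.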
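A second note, on the 3D transfer size in PMC_S_Set_DataArray_3d above: the per-column height is computed as dims(4) = nz_cl + dims(1) - nz, which the inline comment states works for a first dimension declared either 1:nz or 0:nz+1. A small check with illustrative values (nz and nz_cl below are assumptions, not from the changeset):

PROGRAM check_dims4
!-- Verifies the transfer-height formula dims(4) = nz_cl + dims(1) - nz
!-- used in PMC_S_Set_DataArray_3d; the level counts are illustrative
   IMPLICIT NONE
   INTEGER, PARAMETER :: nz    = 64     ! server vertical levels
   INTEGER, PARAMETER :: nz_cl = 32     ! client vertical levels
   INTEGER :: dims1, dims4

!-- First dimension declared 0:nz+1: dims(1) = nz+2, ghost layers included
   dims1 = nz + 2
   dims4 = nz_cl + dims1 - nz
   PRINT *, '0:nz+1 declaration sends', dims4, 'levels'   ! nz_cl+2 = 34

!-- First dimension declared 1:nz: dims(1) = nz, no ghost layers
   dims1 = nz
   dims4 = nz_cl + dims1 - nz
   PRINT *, '1:nz   declaration sends', dims4, 'levels'   ! nz_cl = 32
END PROGRAM check_dims4

In both declarations the formula transfers exactly the client's column, with or without the two ghost layers, which is what the original inline comment asserts.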