MODULE pmc_client

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2015 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_client.f90 1809 2016-04-05 20:13:28Z maronga $
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different data-transfer modes
!
! 1791 2016-03-11 10:41:25Z raasch
! debug write-statement commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! change in client-server data transfer: server now gets data from client
! instead of the client putting it to the server
!
! 1783 2016-03-06 18:36:17Z raasch
! Bugfix: wrong data type in MPI_WIN_CREATE replaced
!
! 1779 2016-03-03 08:01:28Z raasch
! kind=dp replaced by wp, dim_order removed,
! array management changed from linked list to sequential loop
!
! 1764 2016-02-28 12:45:19Z raasch
! cpp-statement added (nesting can only be used in parallel mode),
! all kinds given in PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
!
! Client part of the PALM Model Coupler
!------------------------------------------------------------------------------!
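!
!-- Overview (summary of the routines below): PMC_ClientInit sets up the client
!-- communicators, PMC_Set_DataArray_Name negotiates the coupled array names
!-- with the server, PMC_C_Get_2D_index_list retrieves the index lists that map
!-- client grid points to server PEs, PMC_C_Set_DataArray registers the local
!-- model arrays, PMC_C_setInd_and_AllocMem allocates the RMA transfer buffers,
!-- and PMC_C_GetBuffer / PMC_C_PutBuffer exchange the data each coupling step.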

#if defined( __parallel )

   USE, INTRINSIC ::  iso_c_binding

#if defined( __mpifh )
   INCLUDE "mpif.h"
#else
   USE MPI
#endif
   USE kinds
   USE PMC_general,             ONLY: ClientDef, DA_NameDef, DA_Namelen, PMC_STATUS_OK,        &
                                      PMC_DA_NAME_ERR, PeDef, ArrayDef, DA_Desclen,            &
                                      PMC_G_SetName, PMC_MAX_ARRAY
   USE PMC_handle_communicator, ONLY: m_model_comm, m_model_rank, m_model_npes,                &
                                      m_to_server_comm
   USE PMC_MPI_wrapper,         ONLY: PMC_Send_to_Server, PMC_Recv_from_Server, PMC_Time,      &
                                      PMC_Bcast, PMC_Inter_Bcast, PMC_Alloc_mem

   IMPLICIT NONE
   PRIVATE
   SAVE

   TYPE(ClientDef) :: me

   INTEGER :: next_array_in_list = 0
   INTEGER :: myIndex = 0                ! counter and unique number for data arrays

!-- Interface section

   INTERFACE PMC_ClientInit
      MODULE PROCEDURE PMC_ClientInit
   END INTERFACE PMC_ClientInit

   INTERFACE PMC_Set_DataArray_Name
      MODULE PROCEDURE PMC_Set_DataArray_Name
      MODULE PROCEDURE PMC_Set_DataArray_Name_LastEntry
   END INTERFACE PMC_Set_DataArray_Name

   INTERFACE PMC_C_Get_2D_index_list
      MODULE PROCEDURE PMC_C_Get_2D_index_list
   END INTERFACE PMC_C_Get_2D_index_list

   INTERFACE PMC_C_clear_next_array_list
      MODULE PROCEDURE PMC_C_clear_next_array_list
   END INTERFACE PMC_C_clear_next_array_list

   INTERFACE PMC_C_GetNextArray
      MODULE PROCEDURE PMC_C_GetNextArray
   END INTERFACE PMC_C_GetNextArray

   INTERFACE PMC_C_Set_DataArray
      MODULE PROCEDURE PMC_C_Set_DataArray_2d
      MODULE PROCEDURE PMC_C_Set_DataArray_3d
   END INTERFACE PMC_C_Set_DataArray

   INTERFACE PMC_C_setInd_and_AllocMem
      MODULE PROCEDURE PMC_C_setInd_and_AllocMem
   END INTERFACE PMC_C_setInd_and_AllocMem

   INTERFACE PMC_C_GetBuffer
      MODULE PROCEDURE PMC_C_GetBuffer
   END INTERFACE PMC_C_GetBuffer

   INTERFACE PMC_C_PutBuffer
      MODULE PROCEDURE PMC_C_PutBuffer
   END INTERFACE PMC_C_PutBuffer

!-- Public section

   PUBLIC PMC_ClientInit, PMC_Set_DataArray_Name, PMC_C_Get_2D_index_list
   PUBLIC PMC_C_GetNextArray, PMC_C_Set_DataArray, PMC_C_clear_next_array_list
   PUBLIC PMC_C_setInd_and_AllocMem, PMC_C_GetBuffer, PMC_C_PutBuffer

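!-- Illustrative call sequence (a minimal sketch only; the authoritative
!-- sequence, including all server-side counterparts, is implemented in
!-- pmc_interface, and the array names used here are hypothetical):
!--
!--    CALL PMC_ClientInit
!--    CALL PMC_Set_DataArray_Name( 'u on server grid', 'server_u',                           &
!--                                 'u on client grid', 'client_u', istat )
!--    CALL PMC_Set_DataArray_Name( LastEntry = .TRUE. )
!--    CALL PMC_C_Get_2D_index_list
!--    CALL PMC_C_clear_next_array_list
!--    DO WHILE ( PMC_C_GetNextArray( myName ) )
!--       CALL PMC_C_Set_DataArray( <client array matching myName> )
!--    END DO
!--    CALL PMC_C_setInd_and_AllocMem
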
CONTAINS

   SUBROUTINE PMC_ClientInit

      IMPLICIT NONE

      INTEGER :: i
      INTEGER :: istat

!--   Tailor the MPI environment
      me%model_comm = m_model_comm
      me%inter_comm = m_to_server_comm

!--   Get rank and size
      CALL MPI_Comm_rank( me%model_comm, me%model_rank, istat )
      CALL MPI_Comm_size( me%model_comm, me%model_npes, istat )
      CALL MPI_Comm_remote_size( me%inter_comm, me%inter_npes, istat )

!--   The intra-communicator is used for MPI_Get
      CALL MPI_Intercomm_merge( me%inter_comm, .TRUE., me%intra_comm, istat )
      CALL MPI_Comm_rank( me%intra_comm, me%intra_rank, istat )

      ALLOCATE( me%PEs(me%inter_npes) )

!--   Allocate for all server PEs an array of TYPE ArrayDef to store information
!--   about the transfer arrays
      DO  i = 1, me%inter_npes
         ALLOCATE( me%PEs(i)%array_list(PMC_MAX_ARRAY) )
      END DO

!     IF ( me%model_rank == 0 )  WRITE( 0, '(a,5i6)' )  'PMC_ClientInit ',                     &
!                           me%model_rank, me%model_npes, me%inter_npes, me%intra_rank

      RETURN
   END SUBROUTINE PMC_ClientInit

   SUBROUTINE PMC_Set_DataArray_Name( ServerArrayDesc, ServerArrayName,                        &
                                      ClientArrayDesc, ClientArrayName, istat )

      IMPLICIT NONE

      CHARACTER(LEN=*), INTENT(IN) :: ServerArrayName
      CHARACTER(LEN=*), INTENT(IN) :: ServerArrayDesc
      CHARACTER(LEN=*), INTENT(IN) :: ClientArrayName
      CHARACTER(LEN=*), INTENT(IN) :: ClientArrayDesc
      INTEGER, INTENT(OUT)         :: istat

!--   Local variables
      TYPE(DA_NameDef) :: myName
      INTEGER          :: myPe
      INTEGER          :: my_AddiArray = 0

      istat = PMC_STATUS_OK

!--   Check length of the array names
      IF ( LEN( TRIM( ServerArrayName ) ) > DA_Namelen  .OR.                                   &
           LEN( TRIM( ClientArrayName ) ) > DA_Namelen )  THEN
         istat = PMC_DA_NAME_ERR
      END IF

      IF ( m_model_rank == 0 )  THEN
         myIndex             = myIndex + 1
         myName%couple_index = myIndex
         myName%ServerDesc   = TRIM( ServerArrayDesc )
         myName%NameOnServer = TRIM( ServerArrayName )
         myName%ClientDesc   = TRIM( ClientArrayDesc )
         myName%NameOnClient = TRIM( ClientArrayName )
      END IF

!--   Broadcast to all client PEs
      CALL PMC_Bcast( myName%couple_index, 0, comm = m_model_comm )
      CALL PMC_Bcast( myName%ServerDesc,   0, comm = m_model_comm )
      CALL PMC_Bcast( myName%NameOnServer, 0, comm = m_model_comm )
      CALL PMC_Bcast( myName%ClientDesc,   0, comm = m_model_comm )
      CALL PMC_Bcast( myName%NameOnClient, 0, comm = m_model_comm )

!--   Broadcast to all server PEs
      IF ( m_model_rank == 0 )  THEN
         myPE = MPI_ROOT
      ELSE
         myPE = MPI_PROC_NULL
      END IF
      CALL PMC_Bcast( myName%couple_index, myPE, comm = m_to_server_comm )
      CALL PMC_Bcast( myName%ServerDesc,   myPE, comm = m_to_server_comm )
      CALL PMC_Bcast( myName%NameOnServer, myPE, comm = m_to_server_comm )
      CALL PMC_Bcast( myName%ClientDesc,   myPE, comm = m_to_server_comm )
      CALL PMC_Bcast( myName%NameOnClient, myPE, comm = m_to_server_comm )

      CALL PMC_G_SetName( me, myName%couple_index, myName%NameOnClient )

      RETURN
   END SUBROUTINE PMC_Set_DataArray_Name

   SUBROUTINE PMC_Set_DataArray_Name_LastEntry( LastEntry )

      IMPLICIT NONE

      LOGICAL, INTENT(IN), OPTIONAL :: LastEntry

!--   Local variables
      TYPE(DA_NameDef) :: myName
      INTEGER          :: myPe

!--   A couple index of -1 signals the end of the name list to the server
      myName%couple_index = -1

      IF ( m_model_rank == 0 )  THEN
         myPE = MPI_ROOT
      ELSE
         myPE = MPI_PROC_NULL
      END IF
      CALL PMC_Bcast( myName%couple_index, myPE, comm = m_to_server_comm )

      RETURN
   END SUBROUTINE PMC_Set_DataArray_Name_LastEntry

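!-- Notes on the index-list exchange implemented below (derived from the code;
!-- the server-side counterpart lives in pmc_server): two RMA windows are
!-- involved. In the first window each server PE exposes, per client rank, a
!-- pair (start displacement, number of elements); in the second window it
!-- exposes the (i,j) index pairs themselves. The client only reads from the
!-- server windows via MPI_Get, so a dummy suffices as its own window buffer.
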
   SUBROUTINE PMC_C_Get_2D_index_list

      IMPLICIT NONE

      INTEGER                             :: i, j, i2, nr, ierr
      INTEGER                             :: dummy
      INTEGER                             :: indWin    ! MPI window object
      INTEGER                             :: indWin2   ! MPI window object
      INTEGER(KIND=MPI_ADDRESS_KIND)      :: win_size  ! size of MPI window 1 (in bytes)
      INTEGER(KIND=MPI_ADDRESS_KIND)      :: disp      ! displacement unit (integer = 4, floating point = 8)
      INTEGER, DIMENSION(me%inter_npes*2) :: NrEle     ! number of elements of a horizontal slice
      TYPE(PeDef), POINTER                :: aPE       ! pointer to PeDef structure
      INTEGER(KIND=MPI_ADDRESS_KIND)      :: WinSize   ! size of MPI window 2 (in bytes)
      INTEGER, DIMENSION(:), POINTER      :: myInd

      win_size = C_SIZEOF( dummy )
      CALL MPI_Win_create( dummy, win_size, iwp, MPI_INFO_NULL, me%intra_comm, indWin, ierr )
      CALL MPI_Win_fence( 0, indWin, ierr )    ! open window on server side
      CALL MPI_Win_fence( 0, indWin, ierr )    ! close window on server side and open it on client side

!--   Get the (start displacement, element count) pair for this client rank from every server PE
      DO  i = 1, me%inter_npes
         disp = me%model_rank * 2
         CALL MPI_Get( NrEle((i-1)*2+1), 2, MPI_INTEGER, i-1, disp, 2, MPI_INTEGER,            &
                       indWin, ierr )
      END DO

!--   MPI_Get is non-blocking -> data in NrEle is not available until MPI_Win_fence is called
      CALL MPI_Win_fence( 0, indWin, ierr )

!--   Allocate memory for the index arrays
      WinSize = 0
      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         i2  = (i-1) * 2 + 1
         nr  = NrEle(i2+1)
         IF ( nr > 0 )  THEN
            ALLOCATE( aPE%locInd(nr) )
         ELSE
            NULLIFY( aPE%locInd )
         END IF
         WinSize = MAX( nr, WinSize )    ! maximum window size
      END DO

      ALLOCATE( myInd(2*WinSize) )
      WinSize = 1

!--   The local buffer used in MPI_Get may but need not be inside the MPI window.
!--   Here a dummy is used as window buffer, because the server PEs never access the
!--   client's part of the RMA window via MPI_Get or MPI_Put.
      CALL MPI_Win_create( dummy, WinSize, iwp, MPI_INFO_NULL, me%intra_comm, indWin2, ierr )

      CALL MPI_Win_fence( 0, indWin2, ierr )    ! open window on server side
      CALL MPI_Win_fence( 0, indWin2, ierr )    ! close window on server side and open it on client side

!--   Fetch the (i,j) index pairs from every server PE
      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         nr  = NrEle(i*2)
         IF ( nr > 0 )  THEN
            disp = NrEle(2*(i-1)+1)
            CALL MPI_Win_lock( MPI_LOCK_SHARED, i-1, 0, indWin2, ierr )
            CALL MPI_Get( myInd, 2*nr, MPI_INTEGER, i-1, disp, 2*nr, MPI_INTEGER,              &
                          indWin2, ierr )
            CALL MPI_Win_unlock( i-1, indWin2, ierr )
            DO  j = 1, nr
               aPE%locInd(j)%i = myInd(2*j-1)
               aPE%locInd(j)%j = myInd(2*j)
            END DO
            aPE%NrEle = nr
         ELSE
            aPE%NrEle = -1
         END IF
      END DO

!--   This barrier is necessary before the windows can be freed
      CALL MPI_Barrier( me%intra_comm, ierr )

      CALL MPI_Win_free( indWin, ierr )
      CALL MPI_Win_free( indWin2, ierr )
      DEALLOCATE( myInd )

      RETURN
   END SUBROUTINE PMC_C_Get_2D_index_list

   SUBROUTINE PMC_C_clear_next_array_list

      IMPLICIT NONE

      next_array_in_list = 0

      RETURN
   END SUBROUTINE PMC_C_clear_next_array_list

!-- List handling is still required to get minimal interaction with pmc_interface
   LOGICAL FUNCTION PMC_C_GetNextArray( myName )

      IMPLICIT NONE

      CHARACTER(LEN=*), INTENT(OUT) :: myName

!--   Local variables
      TYPE(PeDef), POINTER    :: aPE
      TYPE(ArrayDef), POINTER :: ar

      next_array_in_list = next_array_in_list + 1

!--   Array names are the same on all client PEs, so take PE 1 to get the name
      aPE => me%PEs(1)

      IF ( next_array_in_list > aPE%Nr_arrays )  THEN
         PMC_C_GetNextArray = .FALSE.    ! all arrays done
         RETURN
      END IF

      ar => aPE%array_list(next_array_in_list)
      myName = ar%name

      PMC_C_GetNextArray = .TRUE.    ! return .TRUE. while a valid array remains
      RETURN
   END FUNCTION PMC_C_GetNextArray

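!-- Typical iteration pattern over the array list (a sketch only; the actual
!-- loop, including the mapping from name to model array, is implemented in
!-- pmc_interface):
!--
!--    CALL PMC_C_clear_next_array_list
!--    DO WHILE ( PMC_C_GetNextArray( myName ) )
!--       ... select the client array that belongs to myName ...
!--       CALL PMC_C_Set_DataArray( <that array> )
!--    END DO
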
   SUBROUTINE PMC_C_Set_DataArray_2d( array )

      IMPLICIT NONE

      REAL(wp), INTENT(IN), DIMENSION(:,:) :: array

      INTEGER                 :: NrDims
      INTEGER, DIMENSION(4)   :: dims
      TYPE(c_ptr)             :: array_adr
      INTEGER                 :: i
      TYPE(PeDef), POINTER    :: aPE
      TYPE(ArrayDef), POINTER :: ar

      dims    = 1
      NrDims  = 2
      dims(1) = SIZE( array, 1 )
      dims(2) = SIZE( array, 2 )

      array_adr = C_LOC( array )

      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         ar  => aPE%array_list(next_array_in_list)
         ar%NrDims = NrDims
         ar%A_dim  = dims
         ar%data   = array_adr
      END DO

      RETURN
   END SUBROUTINE PMC_C_Set_DataArray_2d

   SUBROUTINE PMC_C_Set_DataArray_3d( array )

      IMPLICIT NONE

      REAL(wp), INTENT(IN), DIMENSION(:,:,:) :: array

      INTEGER                 :: NrDims
      INTEGER, DIMENSION(4)   :: dims
      TYPE(c_ptr)             :: array_adr
      INTEGER                 :: i
      TYPE(PeDef), POINTER    :: aPE
      TYPE(ArrayDef), POINTER :: ar

      dims    = 1
      NrDims  = 3
      dims(1) = SIZE( array, 1 )
      dims(2) = SIZE( array, 2 )
      dims(3) = SIZE( array, 3 )

      array_adr = C_LOC( array )

      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         ar  => aPE%array_list(next_array_in_list)
         ar%NrDims = NrDims
         ar%A_dim  = dims
         ar%data   = array_adr
      END DO

      RETURN
   END SUBROUTINE PMC_C_Set_DataArray_3d

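!-- Notes on the buffer setup implemented below (summarized from the code):
!-- for the server -> client direction the client receives, per server PE and
!-- per array, the index of that array within the server's transfer buffer
!-- (tags 200+) and tracks the maximum array size, because the receive buffer
!-- is allocated only once. For the client -> server direction the client
!-- assigns the buffer indices itself and sends them to the server (tags 300+);
!-- the accumulated length defines the size of the local RMA window.
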
   SUBROUTINE PMC_C_setInd_and_AllocMem

      IMPLICIT NONE

!--   Naming convention: appendix  _sc  -> server to client transfer
!--                                _cs  -> client to server transfer
!--                      Recv           -> server to client transfer
!--                      Send           -> client to server transfer

      INTEGER                               :: i, istat, ierr, j
      INTEGER, PARAMETER                    :: NoIndex = -1
      INTEGER                               :: rCount
      INTEGER                               :: arlen, myIndex, tag
      INTEGER(idp)                          :: bufsize        ! size of the MPI data window
      TYPE(PeDef), POINTER                  :: aPE
      TYPE(ArrayDef), POINTER               :: ar
      INTEGER, DIMENSION(1024)              :: req
      CHARACTER(LEN=DA_Namelen)             :: myName
      TYPE(c_ptr)                           :: base_ptr
      REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_sc  ! base array for server to client transfer
      REAL(wp), DIMENSION(:), POINTER, SAVE :: base_array_cs  ! base array for client to server transfer
      INTEGER(KIND=MPI_ADDRESS_KIND)        :: WinSize

      myIndex = 0
      bufsize = 8

!--   Server to client direction

!--   First stride: compute size and set index
      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         tag = 200

         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)

!--         Receive the index from the server
            tag = tag + 1
            CALL MPI_Recv( myIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,                   &
                           MPI_STATUS_IGNORE, ierr )

!--         Determine the maximum size, because the client buffer is allocated only once
            IF ( ar%NrDims == 3 )  THEN
               bufsize = MAX( bufsize, ar%A_dim(1)*ar%A_dim(2)*ar%A_dim(3) )
            ELSE
               bufsize = MAX( bufsize, ar%A_dim(1)*ar%A_dim(2) )
            END IF
            ar%RecvIndex = myIndex

         END DO
      END DO

!--   Create the RMA (one-sided communication) data buffer.
!--   The buffer for MPI_Get can be PE-local, i.e. it may but need not be part of
!--   the MPI RMA window.
      CALL PMC_Alloc_mem( base_array_sc, bufsize, base_ptr )
      me%TotalBufferSize = bufsize * wp    ! total buffer size in bytes

!--   Second stride: set the buffer pointer
      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)

         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)
            ar%RecvBuf = base_ptr
         END DO
      END DO

!--   Client to server direction
      myIndex = 1
      rCount  = 0
      bufsize = 8

      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)
         tag = 300
         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)
            IF ( ar%NrDims == 2 )  THEN
               arlen = aPE%NrEle                  ! 2D
            ELSE IF ( ar%NrDims == 3 )  THEN
               arlen = aPE%NrEle * ar%A_dim(1)    ! 3D
            END IF

            tag    = tag + 1
            rCount = rCount + 1
            IF ( aPE%NrEle > 0 )  THEN
               CALL MPI_Isend( myIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,               &
                               req(rCount), ierr )
               ar%SendIndex = myIndex
            ELSE
               CALL MPI_Isend( NoIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm,               &
                               req(rCount), ierr )
               ar%SendIndex = NoIndex
            END IF

!--         A maximum of 1024 requests may be outstanding
            IF ( rCount == 1024 )  THEN
               CALL MPI_Waitall( rCount, req, MPI_STATUSES_IGNORE, ierr )
               rCount = 0
            END IF

            IF ( aPE%NrEle > 0 )  THEN
               ar%SendSize = arlen
               myIndex     = myIndex + arlen
               bufsize     = bufsize + arlen
            END IF
         END DO

!--      Wait until all send operations have completed
         IF ( rCount > 0 )  THEN
            CALL MPI_Waitall( rCount, req, MPI_STATUSES_IGNORE, ierr )
         END IF
      END DO

!--   Create the RMA (one-sided communication) window for the client to server
!--   transfer buffer. The counterpart buffer of MPI_Get can be PE-local, i.e. it
!--   may but need not be part of the MPI RMA window. Only one RMA window is
!--   required to prepare the data for the server -> client transfer on the
!--   server side and for the client -> server transfer on the client side.
      CALL PMC_Alloc_mem( base_array_cs, bufsize )
      me%TotalBufferSize = bufsize * wp    ! total buffer size in bytes

      WinSize = me%TotalBufferSize
      CALL MPI_Win_create( base_array_cs, WinSize, wp, MPI_INFO_NULL, me%intra_comm,           &
                           me%win_server_client, ierr )
      CALL MPI_Win_fence( 0, me%win_server_client, ierr )    ! open window to set data
      CALL MPI_Barrier( me%intra_comm, ierr )

!--   Second stride: set the buffer pointer
      DO  i = 1, me%inter_npes
         aPE => me%PEs(i)

         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)
            IF ( aPE%NrEle > 0 )  THEN
               ar%SendBuf = C_LOC( base_array_cs(ar%SendIndex) )
               IF ( ar%SendIndex + ar%SendSize > bufsize )  THEN
                  WRITE( 0, '(a,i4,4i7,1x,a)' )  'Client buffer too small ', i,                &
                         ar%SendIndex, ar%SendSize, ar%SendIndex+ar%SendSize,                  &
                         bufsize, TRIM( ar%name )
                  CALL MPI_Abort( MPI_COMM_WORLD, istat, ierr )
               END IF
            END IF
         END DO
      END DO

      RETURN
   END SUBROUTINE PMC_C_setInd_and_AllocMem

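!-- Notes on the server -> client transfer below (summarized from the code):
!-- the server fills its buffer, both sides synchronize via MPI_Barrier on the
!-- intra-communicator, and each client PE then fetches its part with passive
!-- target RMA (MPI_Win_lock / MPI_Get / MPI_Win_unlock) and unpacks it into
!-- the model arrays using the 2-d index list.
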
   SUBROUTINE PMC_C_GetBuffer( WaitTime )

      IMPLICIT NONE

      REAL(wp), INTENT(OUT), OPTIONAL :: WaitTime

!--   Local variables
      INTEGER                             :: ip, ij, ierr, j
      INTEGER                             :: nr         ! number of elements to get from the server
      INTEGER                             :: myIndex
      REAL(wp)                            :: t1, t2
      TYPE(PeDef), POINTER                :: aPE
      TYPE(ArrayDef), POINTER             :: ar
      INTEGER, DIMENSION(1)               :: buf_shape
      REAL(wp), POINTER, DIMENSION(:)     :: buf
      REAL(wp), POINTER, DIMENSION(:,:)   :: data_2d
      REAL(wp), POINTER, DIMENSION(:,:,:) :: data_3d
      CHARACTER(LEN=DA_Namelen)           :: myName
      INTEGER(KIND=MPI_ADDRESS_KIND)      :: target_disp

!--   Synchronization of the model is done in pmci_client_synchronize and
!--   pmci_server_synchronize. Therefore the RMA window can be filled without
!--   further synchronization at this point and a barrier is not necessary.
!--   Please note that WaitTime has to be set in PMC_S_FillBuffer AND PMC_C_GetBuffer.
      IF ( PRESENT( WaitTime ) )  THEN
         t1 = PMC_Time()
         CALL MPI_Barrier( me%intra_comm, ierr )
         t2 = PMC_Time()
         WaitTime = t2 - t1
      END IF

      CALL MPI_Barrier( me%intra_comm, ierr )    ! wait until the buffer is filled

      DO  ip = 1, me%inter_npes
         aPE => me%PEs(ip)

         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)
            IF ( ar%NrDims == 2 )  THEN
               nr = aPE%NrEle
            ELSE IF ( ar%NrDims == 3 )  THEN
               nr = aPE%NrEle * ar%A_dim(1)
            END IF

            buf_shape(1) = nr
            CALL C_F_POINTER( ar%RecvBuf, buf, buf_shape )

!--         MPI passive target RMA
            IF ( nr > 0 )  THEN
               target_disp = ar%RecvIndex - 1
               CALL MPI_Win_lock( MPI_LOCK_SHARED, ip-1, 0, me%win_server_client, ierr )
               CALL MPI_Get( buf, nr, MPI_REAL, ip-1, target_disp, nr, MPI_REAL,               &
                             me%win_server_client, ierr )
               CALL MPI_Win_unlock( ip-1, me%win_server_client, ierr )
            END IF

!--         Unpack the linear buffer into the model array using the index list
            myIndex = 1
            IF ( ar%NrDims == 2 )  THEN
               CALL C_F_POINTER( ar%data, data_2d, ar%A_dim(1:2) )
               DO  ij = 1, aPE%NrEle
                  data_2d(aPE%locInd(ij)%j,aPE%locInd(ij)%i) = buf(myIndex)
                  myIndex = myIndex + 1
               END DO
            ELSE IF ( ar%NrDims == 3 )  THEN
               CALL C_F_POINTER( ar%data, data_3d, ar%A_dim(1:3) )
               DO  ij = 1, aPE%NrEle
                  data_3d(:,aPE%locInd(ij)%j,aPE%locInd(ij)%i) =                               &
                                                      buf(myIndex:myIndex+ar%A_dim(1)-1)
                  myIndex = myIndex + ar%A_dim(1)
               END DO
            END IF

         END DO
      END DO

      RETURN
   END SUBROUTINE PMC_C_GetBuffer

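!-- Notes on the client -> server transfer below (inferred from the code and
!-- the revision log of r1786): the client packs its model data into the local
!-- RMA window (base_array_cs) and signals completion with a barrier; the
!-- server side subsequently fetches the data with MPI_Get, so packing must be
!-- finished before the barrier is passed.
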
   SUBROUTINE PMC_C_PutBuffer( WaitTime )

      IMPLICIT NONE

      REAL(wp), INTENT(OUT), OPTIONAL :: WaitTime

!--   Local variables
      INTEGER                             :: ip, ij, ierr, j
      INTEGER                             :: nr         ! number of elements to send to the server
      INTEGER                             :: myIndex
      REAL(wp)                            :: t1, t2
      TYPE(PeDef), POINTER                :: aPE
      TYPE(ArrayDef), POINTER             :: ar
      INTEGER, DIMENSION(1)               :: buf_shape
      REAL(wp), POINTER, DIMENSION(:)     :: buf
      REAL(wp), POINTER, DIMENSION(:,:)   :: data_2d
      REAL(wp), POINTER, DIMENSION(:,:,:) :: data_3d
      CHARACTER(LEN=DA_Namelen)           :: myName
      INTEGER(KIND=MPI_ADDRESS_KIND)      :: target_disp

      t1 = PMC_Time()
      CALL MPI_Barrier( me%intra_comm, ierr )    ! wait for an empty buffer
      t2 = PMC_Time()
      IF ( PRESENT( WaitTime ) )  WaitTime = t2 - t1

      DO  ip = 1, me%inter_npes
         aPE => me%PEs(ip)

         DO  j = 1, aPE%Nr_arrays
            ar => aPE%array_list(j)
            myIndex = 1
            IF ( ar%NrDims == 2 )  THEN
               buf_shape(1) = aPE%NrEle
               CALL C_F_POINTER( ar%SendBuf, buf, buf_shape )
               CALL C_F_POINTER( ar%data, data_2d, ar%A_dim(1:2) )
               DO  ij = 1, aPE%NrEle
                  buf(myIndex) = data_2d(aPE%locInd(ij)%j,aPE%locInd(ij)%i)
                  myIndex = myIndex + 1
               END DO
            ELSE IF ( ar%NrDims == 3 )  THEN
               buf_shape(1) = aPE%NrEle * ar%A_dim(1)
               CALL C_F_POINTER( ar%SendBuf, buf, buf_shape )
               CALL C_F_POINTER( ar%data, data_3d, ar%A_dim(1:3) )
               DO  ij = 1, aPE%NrEle
                  buf(myIndex:myIndex+ar%A_dim(1)-1) =                                         &
                                          data_3d(:,aPE%locInd(ij)%j,aPE%locInd(ij)%i)
                  myIndex = myIndex + ar%A_dim(1)
               END DO
            END IF
         END DO
      END DO

!     CALL MPI_Win_fence( 0, me%win_server_client, ierr )    ! a fence might do it; test later
      CALL MPI_Barrier( me%intra_comm, ierr )    ! buffer is filled

      RETURN
   END SUBROUTINE PMC_C_PutBuffer

#endif
END MODULE pmc_client