MODULE pmc_client

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2016 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_client.f90 1834 2016-04-07 14:34:20Z raasch $
!
! 1833 2016-04-07 14:23:03Z raasch
! gfortran requires pointer attributes for some array declarations,
! long line wrapped
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes
!
! 1791 2016-03-11 10:41:25Z raasch
! Debug write-statement commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! change in client-server data transfer: server now gets data from client
! instead of the client putting it to the server
!
! 1783 2016-03-06 18:36:17Z raasch
! Bugfix: wrong data-type in MPI_WIN_CREATE replaced
!
! 1779 2016-03-03 08:01:28Z raasch
! kind=dp replaced by wp, dim_order removed
! array management changed from linked list to sequential loop
!
! 1764 2016-02-28 12:45:19Z raasch
! cpp-statement added (nesting can only be used in parallel mode),
! all kinds given in PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
!
! Client part of Palm Model Coupler
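!
! Typical calling sequence on the client side (a sketch only; the actual sequence
! is controlled by the nesting setup in pmc_interface and may differ):
!
!    CALL PMC_ClientInit                      ! set up communicators and PE list
!    CALL PMC_Set_DataArray_Name (...)        ! once per coupled array pair
!    CALL PMC_Set_DataArray_Name ( LastEntry = .TRUE. )
!    CALL PMC_C_Get_2D_index_list             ! fetch (i,j) index list from server
!    CALL PMC_C_clear_next_array_list
!    DO WHILE ( PMC_C_GetNextArray( myName ) )
!       CALL PMC_C_Set_DataArray (...)        ! register the local array
!    ENDDO
!    CALL PMC_C_setInd_and_AllocMem           ! exchange indices, create RMA window
!
!    CALL PMC_C_GetBuffer (...)               ! each coupling step: get data from server
!    CALL PMC_C_PutBuffer (...)               ! each coupling step: provide data to server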
!------------------------------------------------------------------------------!

#if defined( __parallel )

    use, intrinsic :: iso_c_binding

#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif
    USE kinds
    USE PMC_general,             ONLY: ClientDef, DA_NameDef, DA_Namelen, PMC_STATUS_OK,            &
                                       PMC_DA_NAME_ERR, PeDef, ArrayDef, DA_Desclen, DA_Namelen,    &
                                       PMC_G_SetName, PMC_MAX_ARRAY
    USE PMC_handle_communicator, ONLY: m_model_comm, m_model_rank, m_model_npes, m_to_server_comm
    USE PMC_MPI_wrapper,         ONLY: PMC_Send_to_Server, PMC_Recv_from_Server, PMC_Time,          &
                                       PMC_Bcast, PMC_Inter_Bcast, PMC_Alloc_mem
    IMPLICIT none
    PRIVATE
    SAVE

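!
!-- Coupling context of this client: MPI communicators, ranks and sizes, and, for
!-- every server PE, the list of descriptors of the arrays taking part in the transfer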
    Type(ClientDef) :: me

    INTEGER :: next_array_in_list = 0
    INTEGER :: myIndex = 0              ! Counter and unique number for data arrays

!
!-- Interface section
    INTERFACE PMC_ClientInit
        MODULE procedure PMC_ClientInit
    END INTERFACE PMC_ClientInit

    INTERFACE PMC_Set_DataArray_Name
        MODULE procedure PMC_Set_DataArray_Name
        MODULE procedure PMC_Set_DataArray_Name_LastEntry
    END INTERFACE PMC_Set_DataArray_Name

    INTERFACE PMC_C_Get_2D_index_list
        MODULE procedure PMC_C_Get_2D_index_list
    END INTERFACE PMC_C_Get_2D_index_list

    INTERFACE PMC_C_clear_next_array_list
        MODULE procedure PMC_C_clear_next_array_list
    END INTERFACE PMC_C_clear_next_array_list

    INTERFACE PMC_C_GetNextArray
        MODULE procedure PMC_C_GetNextArray
    END INTERFACE PMC_C_GetNextArray

    INTERFACE PMC_C_Set_DataArray
        MODULE procedure PMC_C_Set_DataArray_2d
        MODULE procedure PMC_C_Set_DataArray_3d
    END INTERFACE PMC_C_Set_DataArray

    INTERFACE PMC_C_setInd_and_AllocMem
        MODULE procedure PMC_C_setInd_and_AllocMem
    END INTERFACE PMC_C_setInd_and_AllocMem

    INTERFACE PMC_C_GetBuffer
        MODULE procedure PMC_C_GetBuffer
    END INTERFACE PMC_C_GetBuffer

    INTERFACE PMC_C_PutBuffer
        MODULE procedure PMC_C_PutBuffer
    END INTERFACE PMC_C_PutBuffer

!
!-- Public section
    PUBLIC PMC_ClientInit, PMC_Set_DataArray_Name, PMC_C_Get_2D_index_list
    PUBLIC PMC_C_GetNextArray, PMC_C_Set_DataArray, PMC_C_clear_next_array_list
    PUBLIC PMC_C_setInd_and_AllocMem, PMC_C_GetBuffer, PMC_C_PutBuffer

[1762] | 134 | |
---|
| 135 | CONTAINS |
---|
| 136 | |
---|
| 137 | SUBROUTINE PMC_ClientInit |
---|
| 138 | IMPLICIT none |
---|
| 139 | |
---|
| 140 | INTEGER :: i |
---|
| 141 | INTEGER :: istat |
---|
| 142 | |
---|
| 143 | |
---|
| 144 | ! Tailor MPI environment |
---|
| 145 | |
---|
| 146 | me%model_comm = m_model_comm |
---|
| 147 | me%inter_comm = m_to_server_comm |
---|
| 148 | |
---|
| 149 | ! Get rank and size |
---|
| 150 | CALL MPI_Comm_rank (me%model_comm, me%model_rank, istat); |
---|
| 151 | CALL MPI_Comm_size (me%model_comm, me%model_npes, istat); |
---|
| 152 | CALL MPI_Comm_remote_size (me%inter_comm, me%inter_npes, istat); |
---|
| 153 | |
---|
| 154 | ! intra communicater is used for MPI_Get |
---|
| 155 | CALL MPI_Intercomm_merge (me%inter_comm, .true., me%intra_comm, istat); |
---|
| 156 | CALL MPI_Comm_rank (me%intra_comm, me%intra_rank, istat); |
---|
| 157 | ALLOCATE (me%PEs(me%inter_npes)) |
---|
| 158 | |
---|
[1779] | 159 | ! |
---|
| 160 | !-- Allocate for all Server PEs an array of TYPE ArrayDef to store information of transfer array |
---|
[1762] | 161 | do i=1,me%inter_npes |
---|
[1779] | 162 | ALLOCATE(me%PEs(i)%array_list(PMC_MAX_ARRAY)) |
---|
[1762] | 163 | end do |
---|
| 164 | |
---|
[1791] | 165 | ! if(me%model_rank == 0) write(0,'(a,5i6)') 'PMC_ClientInit ',me%model_rank,me%model_npes,me%inter_npes,me%intra_rank |
---|
[1762] | 166 | |
---|
| 167 | return |
---|
| 168 | END SUBROUTINE PMC_ClientInit |
---|

SUBROUTINE PMC_Set_DataArray_Name (ServerArrayDesc, ServerArrayName, ClientArrayDesc, ClientArrayName, istat)
    IMPLICIT none
    character(len=*),INTENT(IN) :: ServerArrayName
    character(len=*),INTENT(IN) :: ServerArrayDesc
    character(len=*),INTENT(IN) :: ClientArrayName
    character(len=*),INTENT(IN) :: ClientArrayDesc
    INTEGER,INTENT(OUT)         :: istat

!-- local variables
    type(DA_NameDef) :: myName
    INTEGER          :: myPe
    INTEGER          :: my_AddiArray=0

    istat = PMC_STATUS_OK
    if(len(trim(ServerArrayName)) > DA_Namelen .or.                            &
       len(trim(ClientArrayName)) > DA_Namelen) then    ! name too long
        istat = PMC_DA_NAME_ERR
    end if

    if(m_model_rank == 0) then
        myIndex = myIndex+1
        myName%couple_index = myIndex
        myName%ServerDesc   = trim(ServerArrayDesc)
        myName%NameOnServer = trim(ServerArrayName)
        myName%ClientDesc   = trim(ClientArrayDesc)
        myName%NameOnClient = trim(ClientArrayName)
    end if

    ! Broadcast to all client PEs
    CALL PMC_Bcast ( myName%couple_index, 0, comm=m_model_comm)
    CALL PMC_Bcast ( myName%ServerDesc,   0, comm=m_model_comm)
    CALL PMC_Bcast ( myName%NameOnServer, 0, comm=m_model_comm)
    CALL PMC_Bcast ( myName%ClientDesc,   0, comm=m_model_comm)
    CALL PMC_Bcast ( myName%NameOnClient, 0, comm=m_model_comm)

    ! Broadcast to all server PEs

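    ! On an intercommunicator broadcast only the root process of the sending group
    ! passes MPI_ROOT as root argument; all other processes of that group pass
    ! MPI_PROC_NULL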
    if(m_model_rank == 0) then
        myPE = MPI_ROOT
    else
        myPE = MPI_PROC_NULL
    endif
    CALL PMC_Bcast ( myName%couple_index, myPE, comm=m_to_server_comm)
    CALL PMC_Bcast ( myName%ServerDesc,   myPE, comm=m_to_server_comm)
    CALL PMC_Bcast ( myName%NameOnServer, myPE, comm=m_to_server_comm)
    CALL PMC_Bcast ( myName%ClientDesc,   myPE, comm=m_to_server_comm)
    CALL PMC_Bcast ( myName%NameOnClient, myPE, comm=m_to_server_comm)

    CALL PMC_G_SetName (me, myName%couple_index, myName%NameOnClient)

    return
END SUBROUTINE PMC_Set_DataArray_Name

SUBROUTINE PMC_Set_DataArray_Name_LastEntry (LastEntry)
    IMPLICIT none
    LOGICAL,INTENT(IN),optional :: LastEntry

!-- local variables
    type(DA_NameDef) :: myName
    INTEGER          :: myPe

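!-- couple_index = -1 signals the last entry of the name list to the server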
    myName%couple_index = -1

    if(m_model_rank == 0) then
        myPE = MPI_ROOT
    else
        myPE = MPI_PROC_NULL
    endif
    CALL PMC_Bcast ( myName%couple_index, myPE, comm=m_to_server_comm)

    return
END SUBROUTINE PMC_Set_DataArray_Name_LastEntry

SUBROUTINE PMC_C_Get_2D_index_list
    IMPLICIT none

    INTEGER                            :: i,j,i2,nr,ierr
    INTEGER                            :: dummy
    INTEGER                            :: indWin    !: MPI window object
    INTEGER                            :: indWin2   !: MPI window object
    INTEGER(KIND=MPI_ADDRESS_KIND)     :: win_size  !: Size of MPI window 1 (in bytes)
    INTEGER(KIND=MPI_ADDRESS_KIND)     :: disp      !: Target displacement used in MPI_Get (in window units)
    INTEGER,DIMENSION(me%inter_npes*2) :: NrEle     !: Pairs of (displacement, number of elements) per server PE
    TYPE(PeDef),POINTER                :: aPE       !: Pointer to PeDef structure
    INTEGER(KIND=MPI_ADDRESS_KIND)     :: WinSize   !: Size of MPI window 2 (in bytes)
    INTEGER,DIMENSION(:),POINTER       :: myInd

!   CALL PMC_C_CGet_Rem_index_list

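!
!-- The index list is fetched from the server in two steps: via the first window
!-- each client PE reads, for every server PE, a pair (displacement, number of
!-- elements); via the second window it then gets that many (i,j) index pairs
!-- starting at the given displacement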
    win_size = c_sizeof(dummy)
    CALL MPI_Win_create (dummy, win_size, iwp, MPI_INFO_NULL, me%intra_comm, indWin, ierr)
    CALL MPI_Win_fence (0, indWin, ierr)    ! Open window on server side
    CALL MPI_Win_fence (0, indWin, ierr)    ! Close window on server side and open on client side

    do i=1,me%inter_npes
        disp = me%model_rank*2
        CALL MPI_Get (NrEle((i-1)*2+1),2,MPI_INTEGER,i-1,disp,2,MPI_INTEGER,indWin, ierr)
    end do
    CALL MPI_Win_fence (0, indWin, ierr)    ! MPI_Get is non-blocking -> data in NrEle is not available until the fence call

    WinSize = 0
    do i=1,me%inter_npes                    ! Allocate memory for the index array
        aPE => me%PEs(i)
        i2 = (i-1)*2+1
        nr = NrEle(i2+1)
        if(nr > 0) then
            ALLOCATE(aPE%locInd(nr))
        else
            NULLIFY (aPE%locInd)
        endif
        WinSize = max(nr,WinSize)           ! Maximum window size
    end do

    ALLOCATE(myInd(2*WinSize))
    WinSize = 1

    ! The local buffer used in MPI_Get can, but need not, be inside the MPI window.
    ! Here, we use a dummy for the MPI window because the server PEs do not access
    ! the RMA window via MPI_Get or MPI_Put
    CALL MPI_Win_create (dummy, WinSize, iwp, MPI_INFO_NULL, me%intra_comm, indWin2, ierr)

    CALL MPI_Win_fence (0, indWin2, ierr)   ! Window synchronization with the server side
    CALL MPI_Win_fence (0, indWin2, ierr)   ! Window synchronization with the server side

    do i=1,me%inter_npes
        aPE => me%PEs(i)
        nr = NrEle(i*2)
        if(nr > 0 ) then
            disp = NrEle(2*(i-1)+1)
            CALL MPI_Win_lock (MPI_LOCK_SHARED , i-1, 0, indWin2, ierr)
            CALL MPI_Get (myInd,2*nr,MPI_INTEGER,i-1,disp,2*nr,MPI_INTEGER,indWin2, ierr)
            CALL MPI_Win_unlock (i-1, indWin2, ierr)
            do j=1,nr
                aPE%locInd(j)%i = myInd(2*j-1)
                aPE%locInd(j)%j = myInd(2*j)
            end do
            aPE%NrEle = nr
        else
            aPE%NrEle = -1
        end if
    end do

    CALL MPI_Barrier(me%intra_comm, ierr)   ! Don't know why, but this barrier is necessary before the windows can be freed

    CALL MPI_Win_free(indWin, ierr)
    CALL MPI_Win_free(indWin2, ierr)
    DEALLOCATE (myInd)

    return
END SUBROUTINE PMC_C_Get_2D_index_list

SUBROUTINE PMC_C_clear_next_array_list
    IMPLICIT none

    next_array_in_list = 0

    return
END SUBROUTINE PMC_C_clear_next_array_list

! List handling is still required to get minimal interaction with pmc_interface
LOGICAL function PMC_C_GetNextArray (myName)
    character(len=*),INTENT(OUT) :: myName

!-- local variables
    TYPE(PeDef),POINTER    :: aPE
    TYPE(ArrayDef),POINTER :: ar

    next_array_in_list = next_array_in_list+1

!-- Array names are the same on all client PEs, so take the first PE to get the name
    aPE => me%PEs(1)

    if(next_array_in_list > aPE%Nr_arrays) then
        PMC_C_GetNextArray = .false.    ! all arrays done
        return
    end if

    ar => aPE%array_list(next_array_in_list)

    myName = ar%name

    PMC_C_GetNextArray = .true.         ! return true if legal array
    return
END function PMC_C_GetNextArray

SUBROUTINE PMC_C_Set_DataArray_2d (array)

    IMPLICIT none

    REAL(wp), INTENT(IN), DIMENSION(:,:), POINTER :: array

    INTEGER                :: NrDims
    INTEGER,DIMENSION (4)  :: dims
    TYPE(c_ptr)            :: array_adr
    INTEGER                :: i
    TYPE(PeDef),POINTER    :: aPE
    TYPE(ArrayDef),POINTER :: ar

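!
!-- Store the dimensions and the C address of the client array in the array-list
!-- entry selected by the preceding PMC_C_GetNextArray call, for every server PE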
    dims = 1

    NrDims  = 2
    dims(1) = size(array,1)
    dims(2) = size(array,2)

    array_adr = c_loc(array)

    do i=1,me%inter_npes
        aPE => me%PEs(i)
        ar  => aPE%array_list(next_array_in_list)
        ar%NrDims = NrDims
        ar%A_dim  = dims
        ar%data   = array_adr
    end do

    return
END SUBROUTINE PMC_C_Set_DataArray_2d

SUBROUTINE PMC_C_Set_DataArray_3d (array)

    IMPLICIT none

    REAL(wp), INTENT(IN), DIMENSION(:,:,:), POINTER :: array

    INTEGER                :: NrDims
    INTEGER,DIMENSION (4)  :: dims
    TYPE(c_ptr)            :: array_adr
    INTEGER                :: i
    TYPE(PeDef),POINTER    :: aPE
    TYPE(ArrayDef),POINTER :: ar

    dims = 1

    NrDims  = 3
    dims(1) = size(array,1)
    dims(2) = size(array,2)
    dims(3) = size(array,3)

    array_adr = c_loc(array)

    do i=1,me%inter_npes
        aPE => me%PEs(i)
        ar  => aPE%array_list(next_array_in_list)
        ar%NrDims = NrDims
        ar%A_dim  = dims
        ar%data   = array_adr
    end do

    return
END SUBROUTINE PMC_C_Set_DataArray_3d

SUBROUTINE PMC_C_setInd_and_AllocMem

    IMPLICIT none

!-- Naming convention: appending  _sc  -> server to client transfer
!--                               _cs  -> client to server transfer
!--                               Recv -> server to client transfer
!--                               Send -> client to server transfer

    INTEGER                                 :: i, istat, ierr, j
    INTEGER,PARAMETER                       :: NoIndex=-1
    INTEGER                                 :: rcount
    INTEGER                                 :: arlen, myIndex, tag
    INTEGER(idp)                            :: bufsize          ! Size of MPI data window
    TYPE(PeDef),POINTER                     :: aPE
    TYPE(ArrayDef),POINTER                  :: ar
    INTEGER,DIMENSION(1024)                 :: req
    character(len=DA_Namelen)               :: myName
    Type(c_ptr)                             :: base_ptr
    REAL(kind=wp),DIMENSION(:),POINTER,save :: base_array_sc    ! Base array for server-to-client transfer
    REAL(kind=wp),DIMENSION(:),POINTER,save :: base_array_cs    ! Base array for client-to-server transfer
    INTEGER(KIND=MPI_ADDRESS_KIND)          :: WinSize

    myIndex = 0
    bufsize = 8

!-- Server to client direction

!-- First stride: compute size and set index

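!-- For every transfer array the server sends the start index of that array within
!-- its transfer buffer; it is stored as RecvIndex and used later as the target
!-- displacement of MPI_Get in PMC_C_GetBuffer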
    do i=1,me%inter_npes
        aPE => me%PEs(i)
        tag = 200

        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)

            ! Receive index from server
            tag = tag+1
            CALL MPI_Recv (myIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm, MPI_STATUS_IGNORE, ierr)

            if(ar%NrDims == 3) then
                bufsize = max(bufsize,ar%A_dim(1)*ar%A_dim(2)*ar%A_dim(3))    ! determine max, because client buffer is allocated only once
            else
                bufsize = max(bufsize,ar%A_dim(1)*ar%A_dim(2))
            end if
            ar%RecvIndex = myIndex

        end do
    end do

!-- Create RMA (one-sided communication) data buffer.
!-- The buffer for MPI_Get can be PE-local, i.e. it can, but need not, be part of the MPI RMA window
    CALL PMC_Alloc_mem (base_array_sc, bufsize, base_ptr)
    me%TotalBufferSize = bufsize*wp               ! Total buffer size in bytes

!-- Second stride: set buffer pointer
    do i=1,me%inter_npes
        aPE => me%PEs(i)

        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)
            ar%RecvBuf = base_ptr
        end do
    end do

!-- Client to server direction

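!-- Mirror image of the first stride: the client computes the start indices within
!-- its own send buffer and passes them to the server via MPI_Isend below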
    myIndex = 1
    rCount  = 0
    bufsize = 8

    do i=1,me%inter_npes
        aPE => me%PEs(i)
        tag = 300
        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)
            if(ar%NrDims == 2) then
                arlen = aPE%NrEle                  ! 2D
            else if(ar%NrDims == 3) then
                arlen = aPE%NrEle*ar%A_dim(1)      ! 3D
            end if

            tag    = tag+1
            rCount = rCount+1
            if(aPE%NrEle > 0) then
                CALL MPI_Isend (myIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm, req(rCount), ierr)
                ar%SendIndex = myIndex
            else
                CALL MPI_Isend (NoIndex, 1, MPI_INTEGER, i-1, tag, me%inter_comm, req(rCount), ierr)
                ar%SendIndex = NoIndex
            end if

            if(rCount == 1024) then                ! Maximum of 1024 outstanding requests
                CALL MPI_Waitall (rCount, req, MPI_STATUSES_IGNORE, ierr)
                rCount = 0
            end if

            if(aPE%NrEle > 0) then
                ar%SendSize = arlen
                myIndex     = myIndex+arlen
                bufsize     = bufsize+arlen
            end if
        end do
        if(rCount > 0) then                        ! Wait until all sends are completed
            CALL MPI_Waitall (rCount, req, MPI_STATUSES_IGNORE, ierr)
        end if
    end do

!-- Create the RMA (one-sided communication) window for the client-to-server data buffer.
!-- The buffer of MPI_Get (the counterpart of the transfer) can be PE-local, i.e. it can,
!-- but need not, be part of the MPI RMA window.
!-- Only one RMA window is required to prepare the data for the server -> client transfer
!-- on the server side and for the client -> server transfer on the client side
    CALL PMC_Alloc_mem (base_array_cs, bufsize)
    me%TotalBufferSize = bufsize*wp                ! Total buffer size in bytes

    WinSize = me%TotalBufferSize
    CALL MPI_Win_create (base_array_cs, WinSize, wp, MPI_INFO_NULL, me%intra_comm, me%win_server_client, ierr)
    CALL MPI_Win_fence (0, me%win_server_client, ierr)    ! Open window to set data
    CALL MPI_Barrier(me%intra_comm, ierr)

!-- Second stride: set buffer pointer
    do i=1,me%inter_npes
        aPE => me%PEs(i)

        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)
            if(aPE%NrEle > 0) then
                ar%SendBuf = c_loc(base_array_cs(ar%SendIndex))
                if(ar%SendIndex+ar%SendSize > bufsize) then
                    write(0,'(a,i4,4i7,1x,a)') 'Client Buffer too small ',i,                       &
                          ar%SendIndex,ar%SendSize,ar%SendIndex+ar%SendSize,bufsize,trim(ar%name)
                    CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr)
                end if
            end if
        end do
    end do

    return
END SUBROUTINE PMC_C_setInd_and_AllocMem

SUBROUTINE PMC_C_GetBuffer (WaitTime)

    IMPLICIT none

    REAL(wp), INTENT(OUT), optional   :: WaitTime

!-- local variables
    INTEGER                           :: ip, ij, ierr, j
    INTEGER                           :: nr           ! Number of elements to get from the server
    INTEGER                           :: myIndex
    REAL(wp)                          :: t1,t2
    TYPE(PeDef),POINTER               :: aPE
    TYPE(ArrayDef),POINTER            :: ar
    INTEGER,DIMENSION(1)              :: buf_shape
    REAL(wp),POINTER,DIMENSION(:)     :: buf
    REAL(wp),POINTER,DIMENSION(:,:)   :: data_2d
    REAL(wp),POINTER,DIMENSION(:,:,:) :: data_3d
    character(len=DA_Namelen)         :: myName
    INTEGER(kind=MPI_ADDRESS_KIND)    :: target_disp

!
!-- Synchronization of the model is done in pmci_client_synchronize and pmci_server_synchronize.
!-- Therefore the RMA window can be filled without synchronization at this point and a barrier
!-- is not necessary.
!-- Please note that WaitTime has to be set in PMC_S_FillBuffer AND PMC_C_GetBuffer
    if(present(WaitTime)) then
        t1 = PMC_Time()
        CALL MPI_Barrier(me%intra_comm, ierr)
        t2 = PMC_Time()
        WaitTime = t2-t1
    end if

    CALL MPI_Barrier(me%intra_comm, ierr)    ! Wait until the buffer is filled

    do ip=1,me%inter_npes
        aPE => me%PEs(ip)

        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)
            if(ar%NrDims == 2) then
                nr = aPE%NrEle
            else if(ar%NrDims == 3) then
                nr = aPE%NrEle*ar%A_dim(1)
            end if

            buf_shape(1) = nr
            CALL c_f_pointer(ar%RecvBuf, buf, buf_shape)
!
!--         MPI passive target RMA
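!--         ar%RecvIndex is the 1-based start index of this array within the
!--         server-side buffer, hence the target displacement RecvIndex-1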
            if(nr > 0) then
                target_disp = (ar%RecvIndex-1)
                CALL MPI_Win_lock (MPI_LOCK_SHARED , ip-1, 0, me%win_server_client, ierr)
                CALL MPI_Get (buf, nr, MPI_REAL, ip-1, target_disp, nr, MPI_REAL, me%win_server_client, ierr)
                CALL MPI_Win_unlock (ip-1, me%win_server_client, ierr)
            end if

            myIndex = 1
            if(ar%NrDims == 2) then

                CALL c_f_pointer(ar%data, data_2d, ar%A_dim(1:2))
                do ij=1,aPE%NrEle
                    data_2d(aPE%locInd(ij)%j,aPE%locInd(ij)%i) = buf(myIndex)
                    myIndex = myIndex+1
                end do
            else if(ar%NrDims == 3) then
                CALL c_f_pointer(ar%data, data_3d, ar%A_dim(1:3))
                do ij=1,aPE%NrEle
                    data_3d(:,aPE%locInd(ij)%j,aPE%locInd(ij)%i) = buf(myIndex:myIndex+ar%A_dim(1)-1)
                    myIndex = myIndex+ar%A_dim(1)
                end do
            end if

        end do
    end do

    return
END SUBROUTINE PMC_C_GetBuffer

SUBROUTINE PMC_C_PutBuffer (WaitTime)

    IMPLICIT none

    REAL(wp), INTENT(OUT), optional   :: WaitTime

!-- local variables
    INTEGER                           :: ip, ij, ierr, j
    INTEGER                           :: nr           ! Number of elements to transfer to the server
    INTEGER                           :: myIndex
    REAL(wp)                          :: t1,t2
    TYPE(PeDef),POINTER               :: aPE
    TYPE(ArrayDef),POINTER            :: ar
    INTEGER,DIMENSION(1)              :: buf_shape
    REAL(wp),POINTER,DIMENSION(:)     :: buf
    REAL(wp),POINTER,DIMENSION(:,:)   :: data_2d
    REAL(wp),POINTER,DIMENSION(:,:,:) :: data_3d
    character(len=DA_Namelen)         :: myName
    INTEGER(kind=MPI_ADDRESS_KIND)    :: target_disp

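!
!-- The client only fills its local send buffer here; the actual transfer to the
!-- server is done by the server itself, which fetches the data via MPI_Get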
    t1 = PMC_Time()
    CALL MPI_Barrier(me%intra_comm, ierr)    ! Wait for an empty buffer
    t2 = PMC_Time()
    if(present(WaitTime)) WaitTime = t2-t1

    do ip=1,me%inter_npes
        aPE => me%PEs(ip)

        do j=1,aPE%Nr_arrays
            ar => aPE%array_list(j)
            myIndex = 1
            if(ar%NrDims == 2) then
                buf_shape(1) = aPE%NrEle
                CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
                CALL c_f_pointer(ar%data, data_2d, ar%A_dim(1:2))
                do ij=1,aPE%NrEle
                    buf(myIndex) = data_2d(aPE%locInd(ij)%j,aPE%locInd(ij)%i)
                    myIndex = myIndex+1
                end do
            else if(ar%NrDims == 3) then
                buf_shape(1) = aPE%NrEle*ar%A_dim(1)
                CALL c_f_pointer(ar%SendBuf, buf, buf_shape)
                CALL c_f_pointer(ar%data, data_3d, ar%A_dim(1:3))
                do ij=1,aPE%NrEle
                    buf(myIndex:myIndex+ar%A_dim(1)-1) = data_3d(:,aPE%locInd(ij)%j,aPE%locInd(ij)%i)
                    myIndex = myIndex+ar%A_dim(1)
                end do
            end if
        end do
    end do

!   CALL MPI_Win_fence (0, me%win_server_client, ierr)    ! Fence might do it, test later
    CALL MPI_Barrier(me%intra_comm, ierr)                  ! Buffer is filled

    return
END SUBROUTINE PMC_C_PutBuffer

#endif
END MODULE pmc_client