Changeset 667 for palm/trunk/SOURCE/surface_coupler.f90
- Timestamp:
  Dec 23, 2010 12:06:00 PM
- Location:
  palm/trunk/SOURCE
- Files:
  2 edited
palm/trunk/SOURCE
- Property svn:mergeinfo set to
  /palm/branches/suehring 423-666
  /palm/branches/letzel/masked_output/SOURCE 296-409
palm/trunk/SOURCE/surface_coupler.f90
Diff r392:r667

! -----------------
!
! additional case for nonequivalent processor and grid topology in ocean and
! atmosphere added (coupling_topology = 1)
!
! Added exchange of u and v from Ocean to Atmosphere
!
! Former revisions:
...

    REAL ::  time_since_reference_point_rem
    REAL ::  total_2d(-nbgp:ny+nbgp,-nbgp:nx+nbgp)

#if defined( __parallel )

    CALL cpu_log( log_point(39), 'surface_coupler', 'start' )

...

!-- If necessary, the coupler will be called at the beginning of the next
!-- restart run.
    IF ( coupling_topology == 0 )  THEN
       CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER,           &
                          target_id, 0,                                       &
                          terminate_coupled_remote, 1, MPI_INTEGER,           &
                          target_id, 0,                                       &
                          comm_inter, status, ierr )
    ELSE
       IF ( myid == 0 )  THEN
          CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER,        &
                             target_id, 0,                                    &
                             terminate_coupled_remote, 1, MPI_INTEGER,        &
                             target_id, 0,                                    &
                             comm_inter, status, ierr )
       ENDIF
       CALL MPI_BCAST( terminate_coupled_remote, 1, MPI_INTEGER, 0, comm2d,   &
                       ierr )

       ALLOCATE( total_2d_a(-nbgp:ny_a+nbgp,-nbgp:nx_a+nbgp),                 &
                 total_2d_o(-nbgp:ny_o+nbgp,-nbgp:nx_o+nbgp) )

    ENDIF

    IF ( terminate_coupled_remote > 0 )  THEN
       WRITE( message_string, * ) 'remote model "',                           &
                                  ...
                                  '" has',                                    &
                                  '&terminate_coupled = ',                    &
                                  terminate_coupled
       CALL message( 'surface_coupler', 'PA0310', 1, 2, 0, 6, 0 )
       RETURN
    ENDIF

!
!-- Exchange the current simulated time between the models,
!-- currently just for testing
    IF ( coupling_topology == 0 )  THEN
       CALL MPI_SEND( time_since_reference_point, 1, MPI_REAL, target_id,     &
                      11, comm_inter, ierr )
       CALL MPI_RECV( time_since_reference_point_rem, 1, MPI_REAL,            &
                      target_id, 11, comm_inter, status, ierr )
    ELSE
       IF ( myid == 0 )  THEN
          CALL MPI_SEND( time_since_reference_point, 1, MPI_REAL, target_id,  &
                         11, comm_inter, ierr )
          CALL MPI_RECV( time_since_reference_point_rem, 1, MPI_REAL,         &
                         target_id, 11, comm_inter, status, ierr )
       ENDIF
       CALL MPI_BCAST( time_since_reference_point_rem, 1, MPI_REAL, 0,        &
                       comm2d, ierr )
    ENDIF
    WRITE ( 9, * ) 'simulated time: ', simulated_time
    WRITE ( 9, * ) 'time since start of coupling: ',                          &
                   time_since_reference_point, ' remote: ',                   &
                   time_since_reference_point_rem
    CALL local_flush( 9 )

!
!-- Exchange the interface data
    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
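!
!--    The individual quantities are matched between the two coupled models
!--    by their MPI message tags: 12 = surface heat flux, 13 = humidity
!--    flux, 14 = surface temperature, 15/16 = momentum fluxes (u/v),
!--    17/18 = surface velocities (u/v).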
!
!--    Horizontal grid size and number of processors are equal in ocean
!--    and atmosphere
       IF ( coupling_topology == 0 )  THEN

!
!--       Send heat flux at bottom surface to the ocean model
          CALL MPI_SEND( shf(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 12,     &
                         comm_inter, ierr )

!
!--       Send humidity flux at bottom surface to the ocean model
          IF ( humidity )  THEN
             CALL MPI_SEND( qsws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 13, &
                            comm_inter, ierr )
          ENDIF

!
!--       Receive temperature at the bottom surface from the ocean model
          WRITE ( 9, * ) '*** receive pt from ocean'
          CALL local_flush( 9 )
          CALL MPI_RECV( pt(0,nysg,nxlg), 1, type_xy, target_id, 14,          &
                         comm_inter, status, ierr )

!
!--       Send the momentum flux (u) at bottom surface to the ocean model
          CALL MPI_SEND( usws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 15,    &
                         comm_inter, ierr )

!
!--       Send the momentum flux (v) at bottom surface to the ocean model
          CALL MPI_SEND( vsws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 16,    &
                         comm_inter, ierr )

!
!--       Receive u at the bottom surface from the ocean model
          CALL MPI_RECV( u(0,nysg,nxlg), 1, type_xy, target_id, 17,           &
                         comm_inter, status, ierr )

!
!--       Receive v at the bottom surface from the ocean model
          CALL MPI_RECV( v(0,nysg,nxlg), 1, type_xy, target_id, 18,           &
                         comm_inter, status, ierr )

!
!--    Horizontal grid size or number of processors differs between
!--    ocean and atmosphere
       ELSE

!
!--       Send heat flux at bottom surface to the ocean model
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = shf(nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 12 )

!
!--       Send humidity flux at bottom surface to the ocean model
          IF ( humidity )  THEN
             total_2d_a = 0.0
             total_2d   = 0.0
             total_2d(nys:nyn,nxl:nxr) = qsws(nys:nyn,nxl:nxr)
             CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, &
                              0, comm2d, ierr )
             CALL interpolate_to_ocean( 13 )
          ENDIF

!
!--       Receive temperature at the bottom surface from the ocean model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,         &
                            target_id, 14, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0,        &
                          comm2d, ierr )
          pt(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)

!
!--       Send momentum flux (u) at bottom surface to the ocean model
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = usws(nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 15 )

!
!--       Send momentum flux (v) at bottom surface to the ocean model
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = vsws(nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 16 )
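!
!--       The receives in this inhomogeneous case work the other way
!--       round: PE0 receives the global field from the remote model and
!--       broadcasts it within comm2d, after which every PE copies out
!--       its own subdomain (including the ghost layers).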
!
!--       Receive u at the bottom surface from the ocean model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,         &
                            target_id, 17, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0,        &
                          comm2d, ierr )
          u(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)

!
!--       Receive v at the bottom surface from the ocean model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,         &
                            target_id, 18, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0,        &
                          comm2d, ierr )
          v(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)

       ENDIF

    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN

!
!--    Horizontal grid size and number of processors are equal in ocean
!--    and atmosphere
       IF ( coupling_topology == 0 )  THEN
!
!--       Receive heat flux at the sea surface (top) from the atmosphere
!--       model
          CALL MPI_RECV( tswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 12,   &
                         comm_inter, status, ierr )

!
!--       Receive humidity flux from the atmosphere model (bottom) and
!--       add it to the heat flux at the sea surface (top)...
          IF ( humidity_remote )  THEN
             CALL MPI_RECV( qswst_remote(nysg,nxlg), ngp_xy, MPI_REAL,        &
                            target_id, 13, comm_inter, status, ierr )
          ENDIF

!
!--       Send sea surface temperature to the atmosphere model
          CALL MPI_SEND( pt(nzt,nysg,nxlg), 1, type_xy, target_id, 14,        &
                         comm_inter, ierr )

!
!--       Receive momentum flux (u) at the sea surface (top) from the
!--       atmosphere model
          WRITE ( 9, * ) '*** receive uswst from atmosphere'
          CALL local_flush( 9 )
          CALL MPI_RECV( uswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 15,   &
                         comm_inter, status, ierr )

!
!--       Receive momentum flux (v) at the sea surface (top) from the
!--       atmosphere model
          CALL MPI_RECV( vswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 16,   &
                         comm_inter, status, ierr )
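!
!--       Note that this recv/send sequence mirrors the send/recv order
!--       of the atmosphere branch above, so each blocking MPI_SEND is
!--       always faced by a matching MPI_RECV on the remote model.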
!
!--       Send u to the atmosphere model
          CALL MPI_SEND( u(nzt,nysg,nxlg), 1, type_xy, target_id, 17,         &
                         comm_inter, ierr )

!
!--       Send v to the atmosphere model
          CALL MPI_SEND( v(nzt,nysg,nxlg), 1, type_xy, target_id, 18,         &
                         comm_inter, ierr )

!
!--    Horizontal grid size or number of processors differs between
!--    ocean and atmosphere
       ELSE

!
!--       Receive heat flux at the sea surface (top) from the atmosphere
!--       model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,         &
                            target_id, 12, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0,        &
                          comm2d, ierr )
          tswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)

!
!--       Receive humidity flux at the sea surface (top) from the
!--       atmosphere model
          IF ( humidity_remote )  THEN
             IF ( myid == 0 )  THEN
                CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,      &
                               target_id, 13, comm_inter, status, ierr )
             ENDIF
             CALL MPI_BARRIER( comm2d, ierr )
             CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0,     &
                             comm2d, ierr )
             qswst_remote(nysg:nyng,nxlg:nxrg) =                              &
                                           total_2d_o(nysg:nyng,nxlg:nxrg)
          ENDIF

!
!--       Send surface temperature to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = pt(nzt,nys:nyn,nxl:nxr)

          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )

          CALL interpolate_to_atmos( 14 )

!
!--       Receive momentum flux (u) at the sea surface (top) from the
!--       atmosphere model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,         &
                            target_id, 15, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0,        &
                          comm2d, ierr )
          uswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)

!
!--       Receive momentum flux (v) at the sea surface (top) from the
!--       atmosphere model
          IF ( myid == 0 )  THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,         &
                            target_id, 16, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0,        &
                          comm2d, ierr )
          vswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)

!
!--       Send u to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = u(nzt,nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )
          CALL interpolate_to_atmos( 17 )

!
!--       Send v to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = v(nzt,nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0, &
                           comm2d, ierr )
          CALL interpolate_to_atmos( 18 )

       ENDIF
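!
!--    The humidity flux received from the atmosphere is first converted
!--    into an equivalent kinematic heat flux: the evaporative
!--    contribution L * E / ( rho_atm * c_p ), with the latent heat of
!--    vaporization L = 2.2626108E6 m2/s2 and c_p = 1005.0 m2/(s2 K),
!--    is added to the sensible heat flux before the total is adjusted
!--    to ocean units further below.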
!
!--    Conversions of fluxes received from atmosphere
       IF ( humidity_remote )  THEN
!
!--       Here tswst is still the sum of atmospheric bottom heat fluxes
          tswst = tswst + qswst_remote * 2.2626108E6 / 1005.0
...
!--       / ( rho_atm(=1.0) * c_p )
!
!--       ...and convert it to a salinity flux at the sea surface (top)
!--       following Steinhorn (1991), JPO 21, pp. 1681-1683:
!--       S'w' = -S * evaporation / ( rho_water * ( 1 - S ) )
          saswst = -1.0 * sa(nzt,:,:) * qswst_remote /                        &
                   ( rho(nzt,:,:) * ( 1.0 - sa(nzt,:,:) ) )
       ENDIF

...

!
!--    Adjust the kinematic heat flux with respect to ocean density
!--    (constants are the specific heat capacities for air and water);
!--    now tswst is the ocean top heat flux
       tswst = tswst / rho(nzt,:,:) * 1005.0 / 4218.0

...

!
!--    Adjust momentum fluxes with respect to ocean density
       uswst = uswst / rho(nzt,:,:)
       vswst = vswst / rho(nzt,:,:)

    ENDIF

    IF ( coupling_topology == 1 )  THEN
       DEALLOCATE( total_2d_o, total_2d_a )
    ENDIF

...

#endif

 END SUBROUTINE surface_coupler


 SUBROUTINE interpolate_to_atmos( tag )

    USE arrays_3d
    USE control_parameters
    USE grid_variables
    USE indices
    USE pegrid

    IMPLICIT NONE

    INTEGER             ::  dnx, dnx2, dny, dny2, i, ii, j, jj
    INTEGER, INTENT(IN) ::  tag

    CALL MPI_BARRIER( comm2d, ierr )

    IF ( myid == 0 )  THEN

!
!--    Cyclic boundary conditions for the total 2D-grid
       total_2d_o(-nbgp:-1,:) = total_2d_o(ny+1-nbgp:ny,:)
       total_2d_o(:,-nbgp:-1) = total_2d_o(:,nx+1-nbgp:nx)

       total_2d_o(ny+1:ny+nbgp,:) = total_2d_o(0:nbgp-1,:)
       total_2d_o(:,nx+1:nx+nbgp) = total_2d_o(:,0:nbgp-1)

!
!--    Number of gridpoints of the fine grid within one mesh of the
!--    coarse grid
       dnx = ( nx_o + 1 ) / ( nx_a + 1 )
       dny = ( ny_o + 1 ) / ( ny_a + 1 )

!
!--    Distance for interpolation around coarse grid points within the
!--    fine grid (note: 2*dnx2 must not equal dnx)
       dnx2 = 2 * ( dnx / 2 )
       dny2 = 2 * ( dny / 2 )

       total_2d_a = 0.0
!
!--    Interpolation from ocean-grid-layer to atmosphere-grid-layer
       DO  j = 0, ny_a
          DO  i = 0, nx_a
             DO  jj = 0, dny2
                DO  ii = 0, dnx2
                   total_2d_a(j,i) = total_2d_a(j,i)                          &
                                     + total_2d_o(j*dny+jj,i*dnx+ii)
                ENDDO
             ENDDO
             total_2d_a(j,i) = total_2d_a(j,i)                                &
                               / ( ( dnx2 + 1 ) * ( dny2 + 1 ) )
          ENDDO
       ENDDO
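!
!--    Each coarse (atmosphere) grid value is thus the arithmetic mean
!--    of the (dnx2+1)*(dny2+1) fine (ocean) grid points surrounding
!--    the corresponding coarse grid point.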
!
!--    Cyclic boundary conditions for atmosphere grid
       total_2d_a(-nbgp:-1,:) = total_2d_a(ny_a+1-nbgp:ny_a,:)
       total_2d_a(:,-nbgp:-1) = total_2d_a(:,nx_a+1-nbgp:nx_a)

       total_2d_a(ny_a+1:ny_a+nbgp,:) = total_2d_a(0:nbgp-1,:)
       total_2d_a(:,nx_a+1:nx_a+nbgp) = total_2d_a(:,0:nbgp-1)
!
!--    Transfer of the atmosphere-grid-layer to the atmosphere
       CALL MPI_SEND( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, target_id,    &
                      tag, comm_inter, ierr )

    ENDIF

    CALL MPI_BARRIER( comm2d, ierr )

 END SUBROUTINE interpolate_to_atmos


 SUBROUTINE interpolate_to_ocean( tag )

    USE arrays_3d
    USE control_parameters
    USE grid_variables
    USE indices
    USE pegrid

    IMPLICIT NONE

    REAL                ::  fl, fr, myl, myr
    INTEGER             ::  dnx, dny, i, ii, j, jj
    INTEGER, INTENT(IN) ::  tag

    CALL MPI_BARRIER( comm2d, ierr )

    IF ( myid == 0 )  THEN

!
!--    Number of gridpoints of the fine grid within one mesh of the
!--    coarse grid
       dnx = ( nx_o + 1 ) / ( nx_a + 1 )
       dny = ( ny_o + 1 ) / ( ny_a + 1 )

!
!--    Cyclic boundary conditions for atmosphere grid
       total_2d_a(-nbgp:-1,:) = total_2d_a(ny+1-nbgp:ny,:)
       total_2d_a(:,-nbgp:-1) = total_2d_a(:,nx+1-nbgp:nx)

       total_2d_a(ny+1:ny+nbgp,:) = total_2d_a(0:nbgp-1,:)
       total_2d_a(:,nx+1:nx+nbgp) = total_2d_a(:,0:nbgp-1)
!
!--    Bilinear interpolation from atmosphere-grid-layer to
!--    ocean-grid-layer
       DO  j = 0, ny
          DO  i = 0, nx
             myl = ( total_2d_a(j+1,i)   - total_2d_a(j,i)   ) / dny
             myr = ( total_2d_a(j+1,i+1) - total_2d_a(j,i+1) ) / dny
             DO  jj = 0, dny-1
                fl = myl * jj + total_2d_a(j,i)
                fr = myr * jj + total_2d_a(j,i+1)
                DO  ii = 0, dnx-1
                   total_2d_o(j*dny+jj,i*dnx+ii) = ( fr - fl ) / dnx * ii + fl
                ENDDO
             ENDDO
          ENDDO
       ENDDO
!
!--    Cyclic boundary conditions for ocean grid
       total_2d_o(-nbgp:-1,:) = total_2d_o(ny_o+1-nbgp:ny_o,:)
       total_2d_o(:,-nbgp:-1) = total_2d_o(:,nx_o+1-nbgp:nx_o)

       total_2d_o(ny_o+1:ny_o+nbgp,:) = total_2d_o(0:nbgp-1,:)
       total_2d_o(:,nx_o+1:nx_o+nbgp) = total_2d_o(:,0:nbgp-1)

       CALL MPI_SEND( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, target_id,    &
                      tag, comm_inter, ierr )

    ENDIF

    CALL MPI_BARRIER( comm2d, ierr )

 END SUBROUTINE interpolate_to_ocean
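The rank-0 gather/broadcast idiom that recurs in every coupling_topology == 1 branch above can be exercised in isolation. The following self-contained sketch is illustrative only and not part of the changeset; the array sizes, the field values, and the use of MPI_COMM_WORLD in place of comm2d/comm_inter are assumptions made for the example:

 PROGRAM gather_bcast_demo

!
!-- Every rank fills only its own subdomain of a zero-initialized copy of
!-- the global field; MPI_REDUCE with MPI_SUM therefore assembles the
!-- complete field on rank 0, and MPI_BCAST redistributes it, exactly as
!-- in the coupling_topology == 1 branches of surface_coupler.
    USE mpi

    IMPLICIT NONE

    INTEGER, PARAMETER ::  nx = 4       ! points per row (hypothetical size)
    INTEGER            ::  ierr, myid, numprocs
    REAL, ALLOCATABLE  ::  part(:,:), total(:,:)

    CALL MPI_INIT( ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )

!
!-- One row of the global field per rank (stand-in for a PE's subdomain)
    ALLOCATE( part(0:numprocs-1,0:nx-1), total(0:numprocs-1,0:nx-1) )
    part = 0.0
    part(myid,:) = REAL( myid + 1 )

!
!-- Assemble the global field on rank 0 ...
    CALL MPI_REDUCE( part, total, numprocs*nx, MPI_REAL, MPI_SUM, 0,          &
                     MPI_COMM_WORLD, ierr )

!
!-- ... and make it available on all ranks (in surface_coupler, rank 0
!-- additionally exchanges total_2d_a/total_2d_o with the remote model
!-- via comm_inter between these two steps)
    CALL MPI_BCAST( total, numprocs*nx, MPI_REAL, 0, MPI_COMM_WORLD, ierr )

    IF ( myid == 0 )  WRITE ( *, * )  'row sums: ', SUM( total, DIM = 2 )

    CALL MPI_FINALIZE( ierr )

 END PROGRAM gather_bcast_demo

Zero-initializing the local copies and reducing with MPI_SUM avoids the displacement bookkeeping an MPI_GATHERV would need, at the cost of moving a full-size array from every rank.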