 SUBROUTINE surface_coupler

!------------------------------------------------------------------------------!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: surface_coupler.f90 710 2011-03-30 09:45:27Z suehring $
!
! 709 2011-03-30 09:31:40Z raasch
! formatting adjustments
!
! 667 2010-12-23 12:06:00Z suehring/gryschka
! Additional case for nonequivalent processor and grid topology in ocean and
! atmosphere added (coupling_topology = 1).
! Added exchange of u and v from ocean to atmosphere.
!
! 291 2009-04-16 12:07:26Z raasch
! Coupling with independent precursor runs.
! Output of messages replaced by message handling routine.
!
! 206 2008-10-13 14:59:11Z raasch
! Implementation of an MPI-1 coupling: replaced myid with target_id,
! deleted __mpi2 directives
!
! 109 2007-08-28 15:26:47Z letzel
! Initial revision
!
! Description:
! ------------
! Data exchange at the interface between coupled models
!------------------------------------------------------------------------------!

    USE arrays_3d
    USE control_parameters
    USE cpulog
    USE grid_variables
    USE indices
    USE interfaces
    USE pegrid

    IMPLICIT NONE

    INTEGER ::  i, j, k

    REAL ::  time_since_reference_point_rem
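!
!-- Work array for one subdomain's contribution to a global 2-d surface field
!-- (used for coupling_topology = 1 only; it is summed onto PE0 with
!-- MPI_REDUCE before being interpolated between the atmosphere and ocean
!-- grids)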
    REAL ::  total_2d(-nbgp:ny+nbgp,-nbgp:nx+nbgp)

#if defined( __parallel )

    CALL cpu_log( log_point(39), 'surface_coupler', 'start' )


!
!-- In case of model termination initiated by the remote model
!-- (terminate_coupled_remote > 0), initiate termination of the local model.
!-- The rest of the coupler must then be skipped because it would cause an MPI
!-- intercommunication hang.
!-- If necessary, the coupler will be called at the beginning of the next
!-- restart run.
    IF ( coupling_topology == 0 ) THEN
       CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER, target_id, &
                          0,                                                   &
                          terminate_coupled_remote, 1, MPI_INTEGER, target_id, &
                          0, comm_inter, status, ierr )
    ELSE
       IF ( myid == 0 ) THEN
          CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER,         &
                             target_id, 0,                                     &
                             terminate_coupled_remote, 1, MPI_INTEGER,         &
                             target_id, 0,                                     &
                             comm_inter, status, ierr )
       ENDIF
       CALL MPI_BCAST( terminate_coupled_remote, 1, MPI_INTEGER, 0, comm2d,    &
                       ierr )

       ALLOCATE( total_2d_a(-nbgp:ny_a+nbgp,-nbgp:nx_a+nbgp),                  &
                 total_2d_o(-nbgp:ny_o+nbgp,-nbgp:nx_o+nbgp) )

    ENDIF

    IF ( terminate_coupled_remote > 0 )  THEN
       WRITE( message_string, * ) 'remote model "',                            &
                                  TRIM( coupling_mode_remote ),                &
                                  '" terminated',                              &
                                  '&with terminate_coupled_remote = ',         &
                                  terminate_coupled_remote,                    &
                                  '&local model "', TRIM( coupling_mode ),     &
                                  '" has',                                     &
                                  '&terminate_coupled = ',                     &
                                  terminate_coupled
       CALL message( 'surface_coupler', 'PA0310', 1, 2, 0, 6, 0 )
       RETURN
    ENDIF

!
!-- Exchange the current simulated time between the models; the received
!-- value is currently not used any further here
    IF ( coupling_topology == 0 ) THEN

       CALL MPI_SEND( time_since_reference_point, 1, MPI_REAL, target_id, 11,  &
                      comm_inter, ierr )
       CALL MPI_RECV( time_since_reference_point_rem, 1, MPI_REAL, target_id,  &
                      11, comm_inter, status, ierr )
    ELSE

       IF ( myid == 0 ) THEN

          CALL MPI_SEND( time_since_reference_point, 1, MPI_REAL, target_id,   &
                         11, comm_inter, ierr )
          CALL MPI_RECV( time_since_reference_point_rem, 1, MPI_REAL,          &
                         target_id, 11, comm_inter, status, ierr )

       ENDIF

       CALL MPI_BCAST( time_since_reference_point_rem, 1, MPI_REAL, 0, comm2d, &
                       ierr )

    ENDIF

!
!-- Exchange the interface data
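!-- (message tags used below: 12 = surface heat flux, 13 = humidity flux,
!-- 14 = surface temperature, 15/16 = momentum fluxes u/v, 17/18 = velocities
!-- u/v; tag 11 is the simulated time exchanged above)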
    IF ( coupling_mode == 'atmosphere_to_ocean' ) THEN

!
!--    Horizontal grid size and number of processors are equal in ocean and
!--    atmosphere
       IF ( coupling_topology == 0 ) THEN

!
!--       Send heat flux at bottom surface to the ocean
          CALL MPI_SEND( shf(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 12,      &
                         comm_inter, ierr )
!
!--       Send humidity flux at bottom surface to the ocean
          IF ( humidity ) THEN
             CALL MPI_SEND( qsws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 13,  &
                            comm_inter, ierr )
          ENDIF
!
!--       Receive temperature at the bottom surface from the ocean
          CALL MPI_RECV( pt(0,nysg,nxlg), 1, type_xy, target_id, 14,           &
                         comm_inter, status, ierr )
!
!--       Send the momentum flux (u) at bottom surface to the ocean
          CALL MPI_SEND( usws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 15,     &
                         comm_inter, ierr )
!
!--       Send the momentum flux (v) at bottom surface to the ocean
          CALL MPI_SEND( vsws(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 16,     &
                         comm_inter, ierr )
!
!--       Receive u at the bottom surface from the ocean
          CALL MPI_RECV( u(0,nysg,nxlg), 1, type_xy, target_id, 17,            &
                         comm_inter, status, ierr )
!
!--       Receive v at the bottom surface from the ocean
          CALL MPI_RECV( v(0,nysg,nxlg), 1, type_xy, target_id, 18,            &
                         comm_inter, status, ierr )
!
!--    Horizontal grid size or number of processors differs between
!--    ocean and atmosphere
       ELSE

!
!--       Send heat flux at bottom surface to the ocean
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = shf(nys:nyn,nxl:nxr)

          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 12 )
!
!--       Send humidity flux at bottom surface to the ocean
          IF ( humidity ) THEN
             total_2d_a = 0.0
             total_2d   = 0.0
             total_2d(nys:nyn,nxl:nxr) = qsws(nys:nyn,nxl:nxr)

             CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM,  &
                              0, comm2d, ierr )
             CALL interpolate_to_ocean( 13 )
          ENDIF
!
!--       Receive temperature at the bottom surface from the ocean
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,          &
                            target_id, 14, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0, comm2d, &
                          ierr )
          pt(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)
!
!--       Send momentum flux (u) at bottom surface to the ocean
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = usws(nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 15 )
!
!--       Send momentum flux (v) at bottom surface to the ocean
          total_2d_a = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = vsws(nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_a, ngp_a, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_ocean( 16 )
!
!--       Receive u at the bottom surface from the ocean
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,          &
                            target_id, 17, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0, comm2d, &
                          ierr )
          u(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)
!
!--       Receive v at the bottom surface from the ocean
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL,          &
                            target_id, 18, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, 0, comm2d, &
                          ierr )
          v(0,nysg:nyng,nxlg:nxrg) = total_2d_a(nysg:nyng,nxlg:nxrg)

       ENDIF

    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' ) THEN

!
!--    Horizontal grid size and number of processors are equal
!--    in ocean and atmosphere
       IF ( coupling_topology == 0 ) THEN
!
!--       Receive heat flux at the sea surface (top) from the atmosphere
          CALL MPI_RECV( tswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 12,    &
                         comm_inter, status, ierr )
!
!--       Receive humidity flux from the atmosphere (bottom); it is added to
!--       the heat flux at the sea surface (top) further below
          IF ( humidity_remote ) THEN
             CALL MPI_RECV( qswst_remote(nysg,nxlg), ngp_xy, MPI_REAL,         &
                            target_id, 13, comm_inter, status, ierr )
          ENDIF
!
!--       Send sea surface temperature to the atmosphere model
          CALL MPI_SEND( pt(nzt,nysg,nxlg), 1, type_xy, target_id, 14,         &
                         comm_inter, ierr )
!
!--       Receive momentum flux (u) at the sea surface (top) from the
!--       atmosphere
          CALL MPI_RECV( uswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 15,    &
                         comm_inter, status, ierr )
!
!--       Receive momentum flux (v) at the sea surface (top) from the
!--       atmosphere
          CALL MPI_RECV( vswst(nysg,nxlg), ngp_xy, MPI_REAL, target_id, 16,    &
                         comm_inter, status, ierr )
!
!--       Send u to the atmosphere
          CALL MPI_SEND( u(nzt,nysg,nxlg), 1, type_xy, target_id, 17,          &
                         comm_inter, ierr )
!
!--       Send v to the atmosphere
          CALL MPI_SEND( v(nzt,nysg,nxlg), 1, type_xy, target_id, 18,          &
                         comm_inter, ierr )
!
!--    Horizontal grid size or number of processors differs between
!--    ocean and atmosphere
       ELSE
!
!--       Receive heat flux at the sea surface (top) from the atmosphere
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,          &
                            target_id, 12, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0, comm2d, &
                          ierr )
          tswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)
!
!--       Receive humidity flux at the sea surface (top) from the atmosphere
          IF ( humidity_remote ) THEN
             IF ( myid == 0 ) THEN
                CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,       &
                               target_id, 13, comm_inter, status, ierr )
             ENDIF
             CALL MPI_BARRIER( comm2d, ierr )
             CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0,      &
                             comm2d, ierr )
             qswst_remote(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)
          ENDIF
!
!--       Send surface temperature to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = pt(nzt,nys:nyn,nxl:nxr)

          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_atmos( 14 )
!
!--       Receive momentum flux (u) at the sea surface (top) from the
!--       atmosphere
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,          &
                            target_id, 15, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0, comm2d, &
                          ierr )
          uswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)
!
!--       Receive momentum flux (v) at the sea surface (top) from the
!--       atmosphere
          IF ( myid == 0 ) THEN
             CALL MPI_RECV( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,          &
                            target_id, 16, comm_inter, status, ierr )
          ENDIF
          CALL MPI_BARRIER( comm2d, ierr )
          CALL MPI_BCAST( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL, 0, comm2d, &
                          ierr )
          vswst(nysg:nyng,nxlg:nxrg) = total_2d_o(nysg:nyng,nxlg:nxrg)
!
!--       Send u to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = u(nzt,nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_atmos( 17 )
!
!--       Send v to atmosphere
          total_2d_o = 0.0
          total_2d   = 0.0
          total_2d(nys:nyn,nxl:nxr) = v(nzt,nys:nyn,nxl:nxr)
          CALL MPI_REDUCE( total_2d, total_2d_o, ngp_o, MPI_REAL, MPI_SUM, 0,  &
                           comm2d, ierr )
          CALL interpolate_to_atmos( 18 )

       ENDIF

!
!--    Conversions of fluxes received from the atmosphere
       IF ( humidity_remote ) THEN
!
!--       Here tswst is still the sum of atmospheric bottom heat fluxes;
!--       the humidity flux is converted to an equivalent kinematic heat flux
!--       by multiplying with the latent heat of vaporization
!--       (2.2626108E6 m2/s2, i.e. about 540 cal/g or 40.65 kJ/mol) and
!--       dividing by rho_atm(=1.0) * c_p (1005.0 m2/(s2 K))
          tswst = tswst + qswst_remote * 2.2626108E6 / 1005.0
!
!--       ...and convert it to a salinity flux at the sea surface (top)
!--       following Steinhorn (1991), JPO 21, pp. 1681-1683:
!--       S'w' = -S * evaporation / ( rho_water * ( 1 - S ) )
          saswst = -1.0 * sa(nzt,:,:) * qswst_remote /                         &
                   ( rho(nzt,:,:) * ( 1.0 - sa(nzt,:,:) ) )
       ENDIF

!
!--    Adjust the kinematic heat flux with respect to ocean density
!--    (constants are the specific heat capacities for air and water);
!--    now tswst is the ocean top heat flux
       tswst = tswst / rho(nzt,:,:) * 1005.0 / 4218.0

!
!--    Adjust the momentum fluxes with respect to ocean density
       uswst = uswst / rho(nzt,:,:)
       vswst = vswst / rho(nzt,:,:)

    ENDIF

    IF ( coupling_topology == 1 ) THEN
       DEALLOCATE( total_2d_o, total_2d_a )
    ENDIF

    CALL cpu_log( log_point(39), 'surface_coupler', 'stop' )

#endif

 END SUBROUTINE surface_coupler


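!------------------------------------------------------------------------------!
! Description:
! ------------
! Gathered ocean surface data (total_2d_o) are averaged from the finer ocean
! grid onto the coarser atmosphere grid on PE0 and sent to the atmosphere
! model with the given message tag (used for coupling_topology = 1 only)
!------------------------------------------------------------------------------!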
 SUBROUTINE interpolate_to_atmos( tag )

    USE arrays_3d
    USE control_parameters
    USE grid_variables
    USE indices
    USE pegrid

    IMPLICIT NONE

    INTEGER             ::  dnx, dnx2, dny, dny2, i, ii, j, jj
    INTEGER, INTENT(IN) ::  tag

    CALL MPI_BARRIER( comm2d, ierr )

    IF ( myid == 0 ) THEN
!
!--    Cyclic boundary conditions for the total 2D-grid
       total_2d_o(-nbgp:-1,:) = total_2d_o(ny+1-nbgp:ny,:)
       total_2d_o(:,-nbgp:-1) = total_2d_o(:,nx+1-nbgp:nx)

       total_2d_o(ny+1:ny+nbgp,:) = total_2d_o(0:nbgp-1,:)
       total_2d_o(:,nx+1:nx+nbgp) = total_2d_o(:,0:nbgp-1)

!
!--    Number of gridpoints of the fine grid within one mesh of the coarse grid
       dnx = ( nx_o + 1 ) / ( nx_a + 1 )
       dny = ( ny_o + 1 ) / ( ny_a + 1 )

!
!--    Distance for interpolation around coarse grid points within the fine
!--    grid (note: 2*dnx2 must not be equal to dnx)
       dnx2 = 2 * ( dnx / 2 )
       dny2 = 2 * ( dny / 2 )

       total_2d_a = 0.0
!
!--    Interpolation from ocean-grid-layer to atmosphere-grid-layer
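!--    (each atmosphere grid value is the arithmetic mean of the
!--    (dny2+1)*(dnx2+1) ocean grid values starting at the corresponding
!--    fine-grid point)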
       DO  j = 0, ny_a
          DO  i = 0, nx_a
             DO  jj = 0, dny2
                DO  ii = 0, dnx2
                   total_2d_a(j,i) = total_2d_a(j,i)                           &
                                     + total_2d_o(j*dny+jj,i*dnx+ii)
                ENDDO
             ENDDO
             total_2d_a(j,i) = total_2d_a(j,i) / ( ( dnx2 + 1 ) * ( dny2 + 1 ) )
          ENDDO
       ENDDO
!
!--    Cyclic boundary conditions for atmosphere grid
       total_2d_a(-nbgp:-1,:) = total_2d_a(ny_a+1-nbgp:ny_a,:)
       total_2d_a(:,-nbgp:-1) = total_2d_a(:,nx_a+1-nbgp:nx_a)

       total_2d_a(ny_a+1:ny_a+nbgp,:) = total_2d_a(0:nbgp-1,:)
       total_2d_a(:,nx_a+1:nx_a+nbgp) = total_2d_a(:,0:nbgp-1)
!
!--    Transfer of the atmosphere-grid-layer to the atmosphere
       CALL MPI_SEND( total_2d_a(-nbgp,-nbgp), ngp_a, MPI_REAL, target_id,     &
                      tag, comm_inter, ierr )

    ENDIF

    CALL MPI_BARRIER( comm2d, ierr )

 END SUBROUTINE interpolate_to_atmos


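!------------------------------------------------------------------------------!
! Description:
! ------------
! Gathered atmosphere surface data (total_2d_a) are interpolated bilinearly
! from the coarser atmosphere grid onto the finer ocean grid on PE0 and sent
! to the ocean model with the given message tag (used for coupling_topology =
! 1 only)
!------------------------------------------------------------------------------!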
 SUBROUTINE interpolate_to_ocean( tag )

    USE arrays_3d
    USE control_parameters
    USE grid_variables
    USE indices
    USE pegrid

    IMPLICIT NONE

    INTEGER             ::  dnx, dny, i, ii, j, jj
    INTEGER, INTENT(IN) ::  tag
    REAL                ::  fl, fr, myl, myr

    CALL MPI_BARRIER( comm2d, ierr )

    IF ( myid == 0 ) THEN

!
!--    Number of gridpoints of the fine grid within one mesh of the coarse grid
       dnx = ( nx_o + 1 ) / ( nx_a + 1 )
       dny = ( ny_o + 1 ) / ( ny_a + 1 )

!
!--    Cyclic boundary conditions for atmosphere grid
       total_2d_a(-nbgp:-1,:) = total_2d_a(ny+1-nbgp:ny,:)
       total_2d_a(:,-nbgp:-1) = total_2d_a(:,nx+1-nbgp:nx)

       total_2d_a(ny+1:ny+nbgp,:) = total_2d_a(0:nbgp-1,:)
       total_2d_a(:,nx+1:nx+nbgp) = total_2d_a(:,0:nbgp-1)
!
!--    Bilinear interpolation from atmosphere grid-layer to ocean grid-layer
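!--    (myl and myr are the y-direction slopes of the atmosphere field along
!--    the coarse-grid columns i and i+1, fl and fr the values interpolated in
!--    y at these columns; the innermost loop interpolates linearly in x
!--    between fl and fr)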
       DO  j = 0, ny
          DO  i = 0, nx
             myl = ( total_2d_a(j+1,i)   - total_2d_a(j,i)   ) / dny
             myr = ( total_2d_a(j+1,i+1) - total_2d_a(j,i+1) ) / dny
             DO  jj = 0, dny-1
                fl = myl*jj + total_2d_a(j,i)
                fr = myr*jj + total_2d_a(j,i+1)
                DO  ii = 0, dnx-1
                   total_2d_o(j*dny+jj,i*dnx+ii) = ( fr - fl ) / dnx * ii + fl
                ENDDO
             ENDDO
          ENDDO
       ENDDO
!
!--    Cyclic boundary conditions for ocean grid
       total_2d_o(-nbgp:-1,:) = total_2d_o(ny_o+1-nbgp:ny_o,:)
       total_2d_o(:,-nbgp:-1) = total_2d_o(:,nx_o+1-nbgp:nx_o)

       total_2d_o(ny_o+1:ny_o+nbgp,:) = total_2d_o(0:nbgp-1,:)
       total_2d_o(:,nx_o+1:nx_o+nbgp) = total_2d_o(:,0:nbgp-1)

       CALL MPI_SEND( total_2d_o(-nbgp,-nbgp), ngp_o, MPI_REAL,                &
                      target_id, tag, comm_inter, ierr )

    ENDIF

    CALL MPI_BARRIER( comm2d, ierr )

 END SUBROUTINE interpolate_to_ocean