Changeset 2514 for palm/trunk
- Timestamp: Oct 4, 2017 9:52:37 AM (7 years ago)
- Location: palm/trunk/SOURCE
- Files: 7 edited
palm/trunk/SOURCE/check_open.f90
Diff r2512 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 228–248, in the CASE block for the listed file IDs) are whitespace-only realignments.
palm/trunk/SOURCE/init_dvrp.f90
Diff r2300 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 282–287, continuation lines of a clipping/superelevation expression) are whitespace-only.
palm/trunk/SOURCE/init_pegrid.f90
Diff r2414 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 437–441, in an error-message block) are whitespace-only.
palm/trunk/SOURCE/land_surface_model_mod.f90
Diff r2512 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 2729–2733, in the root-fraction normalization) are whitespace-only.
palm/trunk/SOURCE/pmc_handle_communicator_mod.f90
Diff r2280 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 544–548, in the lower-left-corner warning message) are whitespace-only.
palm/trunk/SOURCE/urban_surface_mod.f90
Diff r2512 → r2514: adds "Remove tabs" to the Current revisions header; all other changes (around lines 5541–5545, in a sorting loop) are whitespace-only.
palm/trunk/SOURCE/vertical_nesting_mod.f90
Diff r2374 → r2514: adds "Remove tabs" to the Current revisions header; the numerous remaining changes throughout the module (PUBLIC lists and variable declarations, the MPI_SEND/MPI_RECV calls for the coupled fields and surface fluxes, the MPI derived-type setup, and the time-step synchronization in vnest_timestep_sync) are whitespace-only realignments of indentation and continuation lines.
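Since the only substantive change in r2514 is the removal of tab characters from the seven sources listed above, the cleanup itself can be illustrated with a small script. The sketch below is not the tool used by the PALM developers; the script name strip_tabs.py, the 8-column tab stop, and the in-place rewrite are assumptions for illustration only.

```python
#!/usr/bin/env python3
"""Replace tab characters with spaces in Fortran source files.

Minimal sketch of a tab-removal pass like the one applied in r2514;
the tab width and the in-place rewrite strategy are assumptions, not
taken from any actual PALM maintenance script.
"""
import sys
from pathlib import Path

TAB_WIDTH = 8  # assumed tab stop; adjust to the project's convention


def strip_tabs(path: Path) -> bool:
    """Expand tabs to spaces in *path*; return True if the file changed."""
    original = path.read_text()
    # Expand tabs line by line; line endings are normalized to '\n'.
    cleaned = "\n".join(line.expandtabs(TAB_WIDTH)
                        for line in original.splitlines())
    if original.endswith("\n"):
        cleaned += "\n"
    if cleaned != original:
        path.write_text(cleaned)
        return True
    return False


if __name__ == "__main__":
    # Example usage: python strip_tabs.py palm/trunk/SOURCE/*.f90
    for name in sys.argv[1:]:
        if strip_tabs(Path(name)):
            print(f"removed tabs: {name}")
```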