Changeset 3948

- Timestamp: May 3, 2019 2:49:57 PM (6 years ago)
- Location: palm/trunk/SOURCE
- Files: 2 edited
palm/trunk/SOURCE/pmc_interface_mod.f90
Diff against r3947:

@@ -25 +25 @@
 ! -----------------
 ! $Id$
+! Some variables renamed, a little cleaning up and some commenting improvements
+!
+! 3947 2019-05-03 07:56:44Z hellstea
 ! The checks included in 3946 are extended for the z-direction and moved into its
 ! own subroutine called from pmci_define_index_mapping.

@@ -542 +545 @@
 !
 !-- Children's parent-grid arrays
-    INTEGER(iwp), SAVE, DIMENSION(5), PUBLIC ::  coarse_bound   !< subdomain index bounds for children's parent-grid arrays
+    INTEGER(iwp), SAVE, DIMENSION(5), PUBLIC ::  parent_bound   !< subdomain index bounds for children's parent-grid arrays

     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  dissc   !< Parent-grid array on child domain - dissipation rate

@@ -804 +807 @@
 !
 !-- Set the steering switch which tells the models that they are nested (of
-!-- course the root domain (cpl_id = 1) is not nested)
-    IF ( cpl_id >= 2 )  THEN
+!-- course the root domain is not nested)
+    IF ( .NOT. pmc_is_rootmodel() )  THEN
        child_domain = .TRUE.
        WRITE( coupling_char, '(A2,I2.2)') '_N', cpl_id

@@ -823 +826 @@
 !-- because no location messages would be generated otherwise.
 !-- world_comm is given a dummy value to avoid compiler warnings (INTENT(OUT)
-!-- should get an explicit value)
-!-- todo: why don't we print an error message instead of these settings?
+!-- must get an explicit value).
+!-- Note that this branch is only to avoid compiler warnings. The actual
+!-- execution never reaches here because the call of this subroutine is
+!-- already enclosed by #if defined( __parallel ).
     cpl_id     = 1
     nested_run = .FALSE.

@@ -944 +949 @@
                   !< of the child msib is within the y-range of the child m
 !
-!-- Initialize the current pmc parent
+!-- Initialize the current pmc parent.
     CALL pmc_parentinit
 !

@@ -980 +985 @@
 !-- Find the highest child-domain level in the parent grid for the reduced z
 !-- transfer
-    DO  kp = 1, nz
+    DO  kp = 1, nzt
        IF ( zw(kp) > child_height )  THEN
           nz_child = kp

@@ -1041 +1046 @@
 !-- that the top ghost layer of the child grid does not exceed
 !-- the parent domain top boundary.
-    IF ( child_height > zw(nz) )  THEN
+    IF ( child_height > zw(nzt) )  THEN
        nomatch = 1
     ENDIF

@@ -1462 +1467 @@
       CALL pmc_recv_from_parent( rans_mode_parent, 1, 0, 19, ierr )
 !
-!--   Receive Coarse grid information.
+!--   Receive parent-grid information.
       CALL pmc_recv_from_parent( parent_grid_info_real,                       &
                                  SIZE(parent_grid_info_real), 0, 21, ierr )

@@ -1480 +1485 @@
       pg%nz = parent_grid_info_int(3)
 !
-!--   Get parent coordinates on coarse grid
+!--   Allocate 1-D arrays for parent-grid coordinates and grid-spacings in the z-direction
       ALLOCATE( pg%coord_x(-nbgp:pg%nx+nbgp) )
       ALLOCATE( pg%coord_y(-nbgp:pg%ny+nbgp) )

@@ -1488 +1493 @@
       ALLOCATE( pg%zw(0:pg%nz+1) )
 !
-!--   Get coarse grid coordinates and values of the z-direction from the parent
+!--   Get parent-grid coordinates and grid-spacings in the z-direction from the parent
       IF ( myid == 0 )  THEN
          CALL pmc_recv_from_parent( pg%coord_x, pg%nx+1+2*nbgp, 0, 24, ierr )

@@ -1505 +1510 @@
       CALL MPI_BCAST( pg%zu, pg%nz+2, MPI_REAL, 0, comm2d, ierr )
       CALL MPI_BCAST( pg%zw, pg%nz+2, MPI_REAL, 0, comm2d, ierr )
-      CALL MPI_BCAST( rans_mode_parent, 1, MPI_LOGICAL, 0, comm2d, ierr )
-
-!  CHECK IF pmci_check_grid_matching COULD BE MOVED HERE.
       CALL MPI_BCAST( rans_mode_parent, 1, MPI_LOGICAL, 0, comm2d, ierr )
 !
 !--   Find the index bounds for the nest domain in the coarse-grid index space

@@ -1569 +1571 @@
     IMPLICIT NONE

-    INTEGER(iwp), DIMENSION(5,numprocs) ::  coarse_bound_all   !< Transfer array for parent-grid index bounds
+    INTEGER(iwp), DIMENSION(5,numprocs) ::  parent_bound_all   !< Transfer array for parent-grid index bounds

     INTEGER(iwp), DIMENSION(4) ::  parent_bound_global   !< Transfer array for global parent-grid index bounds

@@ -1693 +1695 @@
     FLUSH(9)

-    coarse_bound(1) = ipl
-    coarse_bound(2) = ipr
-    coarse_bound(3) = jps
-    coarse_bound(4) = jpn
-    coarse_bound(5) = myid
+    parent_bound(1) = ipl
+    parent_bound(2) = ipr
+    parent_bound(3) = jps
+    parent_bound(4) = jpn
+    parent_bound(5) = myid
 !
 !-- The following index bounds are used for allocating index mapping and some other auxiliary arrays

@@ -1708 +1710 @@
 !-- This fact is exploited in creating the index list in pmci_create_index_list
 !   IMPROVE THIS COMMENT. EXPLAIN WHERE THIS INFORMATION IS NEEDED.
-    CALL MPI_GATHER( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5,       &
+    CALL MPI_GATHER( parent_bound, 5, MPI_INTEGER, parent_bound_all, 5,       &
                      MPI_INTEGER, 0, comm2d, ierr )

     IF ( myid == 0 )  THEN
-       size_of_array(1) = SIZE( coarse_bound_all, 1 )
-       size_of_array(2) = SIZE( coarse_bound_all, 2 )
+       size_of_array(1) = SIZE( parent_bound_all, 1 )
+       size_of_array(2) = SIZE( parent_bound_all, 2 )
        CALL pmc_send_to_parent( size_of_array, 2, 0, 40, ierr )
-       CALL pmc_send_to_parent( coarse_bound_all, SIZE( coarse_bound_all ), 0, 41, ierr )
+       CALL pmc_send_to_parent( parent_bound_all, SIZE( parent_bound_all ), 0, 41, ierr )
 !
 !--    Determine the global parent-grid index bounds
-       parent_bound_global(1) = MINVAL( coarse_bound_all(1,:) )
-       parent_bound_global(2) = MAXVAL( coarse_bound_all(2,:) )
-       parent_bound_global(3) = MINVAL( coarse_bound_all(3,:) )
-       parent_bound_global(4) = MAXVAL( coarse_bound_all(4,:) )
+       parent_bound_global(1) = MINVAL( parent_bound_all(1,:) )
+       parent_bound_global(2) = MAXVAL( parent_bound_all(2,:) )
+       parent_bound_global(3) = MINVAL( parent_bound_all(3,:) )
+       parent_bound_global(4) = MAXVAL( parent_bound_all(4,:) )
     ENDIF
 !

@@ -2404 +2406 @@
     IF ( .NOT. salsa_gases_from_chem )  pmc_max_array = pmc_max_array + ngases_salsa

-
 #endif

@@ -2412 +2413 @@
 SUBROUTINE pmci_set_array_pointer( name, child_id, nz_child, n )

    IMPLICIT NONE
… …
    CHARACTER(LEN=*), INTENT(IN) ::  name   !<
+
+#if defined( __parallel )
 !
 !-- Local variables:

@@ -2487 +2490 @@
 !
 !--    Give only one message for the root domain
-       IF ( myid == 0  .AND.  cpl_id == 1 )  THEN
+       IF ( pmc_is_rootmodel()  .AND.  myid == 0 )  THEN
           message_string = 'pointer for array "' // TRIM( name ) // '" can''t be associated'
           CALL message( 'pmci_set_array_pointer', 'PA0117', 3, 2, 0, 6, 0 )

@@ -2497 +2500 @@

    ENDIF
+
+#endif

 END SUBROUTINE pmci_set_array_pointer

@@ -2547 +2552 @@
    REAL(wp),INTENT(OUT) ::  lx_coord, lx_coord_b   !<
    REAL(wp),INTENT(OUT) ::  rx_coord, rx_coord_b   !<
+   REAL(wp),INTENT(OUT) ::  ny_coord, ny_coord_b   !<
    REAL(wp),INTENT(OUT) ::  sy_coord, sy_coord_b   !<
-   REAL(wp),INTENT(OUT) ::  ny_coord, ny_coord_b   !<
    REAL(wp),INTENT(OUT) ::  uz_coord, uz_coord_b   !<

+
+#if defined( __parallel )

    lx_coord = childgrid(m)%lx_coord
… …
    uz_coord_b = childgrid(m)%uz_coord_b

+#endif
+
 END SUBROUTINE get_child_edges

@@ -2578 +2587 @@
    REAL(wp), INTENT(OUT), OPTIONAL ::  dz   !<

+
+#if defined( __parallel )

    dx = childgrid(m)%dx
… …
    ENDIF

+#endif
+
 END SUBROUTINE get_child_gridspacing

@@ -2598 +2611 @@
    INTEGER(iwp), INTENT(IN) ::  js    !<
    INTEGER(iwp), INTENT(IN) ::  nzc   !< nzc is pg%nz, but note that pg%nz is not the original nz of parent,
                                       !< but the highest parent-grid level needed for nesting.
    INTEGER(iwp), INTENT(IN), OPTIONAL ::  n   !< number of chemical species / salsa variables

    CHARACTER(LEN=*), INTENT(IN) ::  name   !<
+
+#if defined( __parallel )
 !
 !-- Local variables:

@@ -2687 +2702 @@
    ELSE
 !
 !--    Give only one message for the first child domain.
-       IF ( myid == 0  .AND.  cpl_id == 2 )  THEN
+       IF ( cpl_id == 2  .AND.  myid == 0 )  THEN
          message_string = 'pointer for array "' // TRIM( name ) //            &
                           '" can''t be associated'

@@ -2700 +2715 @@
    ENDIF

+#endif
 END SUBROUTINE pmci_create_childs_parent_grid_arrays


-!
-! E N D   O F   S E T U P   R O U T I N E S
-!
 SUBROUTINE pmci_parent_initialize

@@ -2748 +2761 @@
 !
 !-- Root model is never anyone's child
-    IF ( cpl_id > 1 )  THEN
+    IF ( .NOT. pmc_is_rootmodel() )  THEN
 !
 !--    Get data from the parent

@@ -3204 +3217 @@ (trailing whitespace removed)
 SUBROUTINE pmci_datatrans( local_nesting_mode )
 !
 !-- This subroutine controls the nesting according to the nestpar

@@ -3218 +3231 @@
    CHARACTER(LEN=*), INTENT(IN) ::  local_nesting_mode   !< Nesting mode: 'one-way', 'two-way' or 'vertical'

+#if defined( __parallel )
 !
 !-- Debug location message

@@ -3266 +3280 @@
    ENDIF

+#endif
 END SUBROUTINE pmci_datatrans

@@ -3272 +3287 @@ (trailing whitespace removed)
 SUBROUTINE pmci_parent_datatrans( direction )

    IMPLICIT NONE

@@ -3344 +3359 @@

    dtl = dt_3d
-   IF ( cpl_id > 1 )  THEN
+   IF ( .NOT. pmc_is_rootmodel() )  THEN

       IF ( direction == parent_to_child )  THEN

@@ -3929 +3944 @@
       workarr_lr = 0.0_wp
       IF ( pdims(2) > 1 )  THEN
-#if defined( __parallel )
+
          IF ( bc_dirichlet_s )  THEN
             workarr_lr(0:pg%nz+1,jpsw:jpnw-1,0:2) = parent_array(0:pg%nz+1,jpsw:jpnw-1,ipbeg:ipbeg+2)
… …
                             workarr_lr(0,jpsw,0), 1, workarr_lr_exchange_type, psouth, 1, comm2d, &
                             status, ierr )
-#endif
+
       ELSE
          workarr_lr(0:pg%nz+1,jpsw:jpnw,0:2) = parent_array(0:pg%nz+1,jpsw:jpnw,ipbeg:ipbeg+2)

@@ -4163 +4178 @@
       workarr_sn = 0.0_wp
       IF ( pdims(1) > 1 )  THEN
-#if defined( __parallel )
+
          IF ( bc_dirichlet_l )  THEN
            workarr_sn(0:pg%nz+1,0:2,iplw:iprw-1) = parent_array(0:pg%nz+1,jpbeg:jpbeg+2,iplw:iprw-1)
… …
                             workarr_sn(0,0,iplw), 1, workarr_sn_exchange_type, pleft, 1, comm2d,  &
                             status, ierr )
-#endif
+
       ELSE
          workarr_sn(0:pg%nz+1,0:2,iplw+1:iprw-1)                                                  &
             = parent_array(0:pg%nz+1,jpbeg:jpbeg+2,iplw+1:iprw-1)

@@ -4402 +4417 @@
 !--    Note that in case of 3-D nesting the left and right boundaries are
 !--    not exchanged because the nest domain is not cyclic.
-#if defined( __parallel )
       IF ( pdims(1) > 1 )  THEN
 !
… …
                             comm2d, status, ierr )
       ENDIF
-#endif

       IF ( var == 'w' )  THEN

@@ -4626 +4639 @@
 !
 !-- Define the index bounds ipl_anterp, ipr_anterp, jps_anterp and jpn_anterp.
-!-- Note that kcb_anterp is simply zero and kct_anterp enters here as a
-!-- parameter and it is determined in pmci_define_index_mapping.
-!-- Note that the grid points used also for interpolation (from parent to
+!-- Note that kcb_anterp is simply zero and kct_anterp depends on kct which enters
+!-- here as a parameter and it is determined in pmci_define_index_mapping.
+!-- Note that the grid points directly used also for interpolation (from parent to
 !-- child) are always excluded from anterpolation, e.g. anterpolation is maximally
-!-- only from nzb:kct-1, as kct is used for interpolation. Similar restriction is
+!-- only from 0:kct-1, since kct is directly used for interpolation. Similar restriction is
 !-- applied to the lateral boundaries as well. An additional buffer is
 !-- also applied (default value for anterpolation_buffer_width = 2) in order

@@ -4708 +4721 @@
 SUBROUTINE pmci_boundary_conds

+#if defined( __parallel )
    USE chem_modules,                                                          &
        ONLY:  ibc_cs_b

@@ -4930 +4944 @@
    ENDIF

+#endif
 END SUBROUTINE pmci_boundary_conds
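Two patterns recur throughout the changes above: tests against the coupler id (cpl_id >= 2, cpl_id == 1, cpl_id > 1) are replaced by the inquiry function pmc_is_rootmodel(), and subroutines whose bodies are meaningless without MPI are now fenced as a whole by #if defined( __parallel ) instead of fencing individual MPI calls. The following minimal sketch (not part of the changeset) combines both patterns; the subroutine name demo_child_setup is hypothetical, while pmc_is_rootmodel, child_domain, coupling_char and cpl_id are the names that appear in the diff:

   SUBROUTINE demo_child_setup    ! hypothetical name, for illustration only

   #if defined( __parallel )
   !
   !-- Query the coupler instead of comparing cpl_id against a magic number:
   !-- every model except the root is somebody's child.
      IF ( .NOT. pmc_is_rootmodel() )  THEN
         child_domain = .TRUE.
         WRITE( coupling_char, '(A2,I2.2)' )  '_N', cpl_id
      ENDIF

   #endif
   END SUBROUTINE demo_child_setup

Fencing whole subroutine bodies keeps serial builds compiling without the MPI-dependent code paths, which is why the #endif lands directly before END SUBROUTINE in pmci_set_array_pointer, get_child_edges, get_child_gridspacing, pmci_datatrans and pmci_boundary_conds.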
palm/trunk/SOURCE/pmc_particle_interface.f90
Diff against r3883:

@@ -26 +26 @@
 ! -----------------!
 ! $Id$
+! coarse bound renamed as parent_bound and icl, icr, jcs, jcn as ipl, ipr, jps, jpn.
+!
+! 3883 2019-04-10 12:51:50Z hellstea
 ! Function get_number_of_childs renamed to get_number_of_children and cg
 ! renamed to pg according to their definitions in pmc_interface_mod

@@ -108 +111 @@
        ONLY:  cpl_id, get_number_of_children, nr_part, part_adr, nested_run,  &
               get_childid, get_child_edges, nr_partc, part_adrc,              &
-              coarse_bound, coord_x, coord_y, pg, get_child_gridspacing,      &
+              parent_bound, coord_x, coord_y, pg, get_child_gridspacing,      &
               lower_left_coord_x, lower_left_coord_y

@@ -251 +254 @@
    INTEGER(iwp) ::  m      !< loop index
    INTEGER(iwp) ::  ierr   !< error code
-   INTEGER(iwp) ::  icl    !< left boundary in coarse(parent) index space
-   INTEGER(iwp) ::  icr    !< right boundary in coarse(parent) index space
-   INTEGER(iwp) ::  jcs    !< south boundary in coarse(parent) index space
-   INTEGER(iwp) ::  jcn    !< north boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipl    !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipr    !< right boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jps    !< south boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jpn    !< north boundary in coarse(parent) index space
    INTEGER(iwp) ::  child_id    !< Id of a child model
    INTEGER(iwp) ::  nr_childs   !< Number of child models of the current model

@@ -285 +288 @@
 !
 !-- Child domain boundaries in the parent index space
-   icl = coarse_bound(1)
-   icr = coarse_bound(2)
-   jcs = coarse_bound(3)
-   jcn = coarse_bound(4)
-
-   ALLOCATE( coarse_particles(jcs:jcn, icl:icr) )
+   ipl = parent_bound(1)
+   ipr = parent_bound(2)
+   jps = parent_bound(3)
+   jpn = parent_bound(4)
+
+   ALLOCATE( coarse_particles(jps:jpn,ipl:ipr) )

    coarse_particles(:,:)%nr_particle = 0

@@ -331 +334 @@
    INTEGER(iwp) ::  i     !< x grid index
-   INTEGER(iwp) ::  icl   !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipl   !< left boundary in coarse(parent) index space
    INTEGER(iwp) ::  ierr  !< error code
    INTEGER(iwp) ::  ij    !< combined xy index for the buffer array
    INTEGER(iwp) ::  ip    !< loop index (child PEs)
    INTEGER(iwp) ::  j     !< y grid index
-   INTEGER(iwp) ::  jcs   !< south boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jps   !< south boundary in coarse(parent) index space
    INTEGER(iwp) ::  nr    !< number of particles to receive from a parent box

@@ -359 +362 @@
 !-- Synchronization is done implicitely in pmc_c_getbuffer and pmc_s_fillbuffer on the parent side

-   icl = coarse_bound(1)
-   jcs = coarse_bound(3)
+   ipl = parent_bound(1)
+   jps = parent_bound(3)

    DO  ip = 1, me%inter_npes

@@ -367 +370 @@
       DO  ij = 1, ape%nrele
-         j = ape%locind(ij)%j + jcs - 1
-         i = ape%locind(ij)%i + icl - 1
+         j = ape%locind(ij)%j + jps - 1
+         i = ape%locind(ij)%i + ipl - 1
          nr = nr_partc(j,i)
          IF ( nr > 0 )  THEN

@@ -406 +409 @@
    INTEGER(iwp) ::  disp_offset            !<
    INTEGER(iwp) ::  i                      !< x loop index
-   INTEGER(iwp) ::  icl                    !< left boundary in coarse(parent) index space
-   INTEGER(iwp) ::  icr                    !< right boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipl                    !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipr                    !< right boundary in coarse(parent) index space
    INTEGER(iwp) ::  ierr                   !< error code
    INTEGER(iwp) ::  ij                     !< combined xy index for the buffer array
    INTEGER(iwp) ::  ip                     !< loop index (child PEs)
    INTEGER(iwp) ::  j                      !< y loop index
-   INTEGER(iwp) ::  jcs                    !< south boundary in coarse(parent) index space
-   INTEGER(iwp) ::  jcn                    !< north boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jps                    !< south boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jpn                    !< north boundary in coarse(parent) index space
    INTEGER(iwp) ::  max_nr_particle_per_pe !< maximum number of particles per PE (depending on grid apect ratio)
    INTEGER(iwp) ::  n                      !< shorter variable name for nr_fine_in_coarse

@@ -440 +443 @@
 !
 !-- Child domain boundaries in the parent index space

-   icl = coarse_bound(1)
-   icr = coarse_bound(2)
-   jcs = coarse_bound(3)
-   jcn = coarse_bound(4)
+   ipl = parent_bound(1)
+   ipr = parent_bound(2)
+   jps = parent_bound(3)
+   jpn = parent_bound(4)

    nr_partc = 0

-   DO  i = icl, icr
-      DO  j = jcs, jcn
+   DO  i = ipl, ipr
+      DO  j = jps, jpn
          nr_partc(j,i) = coarse_particles(j,i)%nr_particle
       ENDDO

@@ -480 +483 @@
       target_disp = disp_offset
       DO ij = 1, ape%nrele
-         j  = ape%locind(ij)%j + jcs - 1
-         i  = ape%locind(ij)%i + icl - 1
+         j  = ape%locind(ij)%j + jps - 1
+         i  = ape%locind(ij)%i + ipl - 1
          nr = nr_partc(j,i)
          IF( nr > 0 ) THEN

@@ -907 +910 @@
    INTEGER(iwp) ::  ic  !< coarse x grid index
-   INTEGER(iwp) ::  icl !< left boundary in coarse(parent) index space
-   INTEGER(iwp) ::  icr !< right boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipl !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipr !< right boundary in coarse(parent) index space
    INTEGER(iwp) ::  ip  !< x grid index
    INTEGER(iwp) ::  jc  !< coarse y grid index
-   INTEGER(iwp) ::  jcn !< north boundary in coarse(parent) index space
-   INTEGER(iwp) ::  jcs !< south boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jpn !< north boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jps !< south boundary in coarse(parent) index space
    INTEGER(iwp) ::  jp  !< y grid index
    INTEGER(iwp) ::  kp  !< z grid index

@@ -927 +930 @@
 !
 !-- Child domain boundaries in the parent index space
-   icl = coarse_bound(1)
-   icr = coarse_bound(2)
-   jcs = coarse_bound(3)
-   jcn = coarse_bound(4)
-
-   DO ic = icl, icr
-      DO jc = jcs, jcn
+   ipl = parent_bound(1)
+   ipr = parent_bound(2)
+   jps = parent_bound(3)
+   jpn = parent_bound(4)
+
+   DO ic = ipl, ipr
+      DO jc = jps, jpn
          nr = coarse_particles(jc,ic)%nr_particle

@@ -984 +987 @@
    INTEGER(iwp) ::  i    !< loop index (x grid)
    INTEGER(iwp) ::  ic   !< loop index (coarse x grid)
-   INTEGER(iwp) ::  icl  !< left boundary in coarse(parent) index space
-   INTEGER(iwp) ::  icr  !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipl  !< left boundary in coarse(parent) index space
+   INTEGER(iwp) ::  ipr  !< left boundary in coarse(parent) index space
    INTEGER(iwp) ::  ierr !< error code
    INTEGER(iwp) ::  j    !< loop index (y grid)
    INTEGER(iwp) ::  jc   !< loop index (coarse y grid)
-   INTEGER(iwp) ::  jcs  !< south boundary in coarse(parent) index space
-   INTEGER(iwp) ::  jcn  !< north boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jps  !< south boundary in coarse(parent) index space
+   INTEGER(iwp) ::  jpn  !< north boundary in coarse(parent) index space
    INTEGER(iwp) ::  k    !< loop index (z grid)
    INTEGER(iwp) ::  n    !< loop index (number of particles)

@@ -1008 +1011 @@
 !-- Child domain boundaries in the parent index space

-   icl = coarse_bound(1)
-   icr = coarse_bound(2)
-   jcs = coarse_bound(3)
-   jcn = coarse_bound(4)
+   ipl = parent_bound(1)
+   ipr = parent_bound(2)
+   jps = parent_bound(3)
+   jpn = parent_bound(4)

@@ -1022 +1025 @@
 !   Clear Particle Buffer

-   DO ic = icl, icr
-      DO jc = jcs, jcn
+   DO ic = ipl, ipr
+      DO jc = jps, jpn
          coarse_particles(jc,ic)%nr_particle = 0
       ENDDO

@@ -1054 +1057 @@
          jc = y / pg%dy

-         IF ( ic >= icl  .AND.  ic <= icr  .AND.  jc >= jcs  .AND.  jc <= jcn )  THEN
+         IF ( ic >= ipl  .AND.  ic <= ipr  .AND.  jc >= jps  .AND.  jc <= jpn )  THEN
             coarse_particles(jc,ic)%nr_particle = coarse_particles(jc,ic)%nr_particle + 1
             CALL check_and_alloc_coarse_particle( ic, jc, coarse_particles(jc,ic)%nr_particle, with_copy=.TRUE. )

@@ -1066 +1069 @@
             grid_particles(k,j,i)%particles(n)%particle_mask = .FALSE.
          ELSE
-            WRITE(9,'(a,10i6)') 'This should not happen ',i,j,k,ic,jc,icl,icr,jcs,jcn
+            WRITE(9,'(a,10i6)') 'This should not happen ',i,j,k,ic,jc,ipl,ipr,jps,jpn
             CALL MPI_Abort( MPI_COMM_WORLD, 9999, ierr )
          ENDIF

@@ -1128 +1131 @@
          yo = particle_in_win(pindex)%origin_y-lower_left_coord_y
          k = nzt + 1
-         DO WHILE ( zw(k-1) > z .AND. k > nzb + 1 )
+         DO WHILE ( zw(k-1) > z .AND. k > nzb + 1 )   ! kk search loop has to be optimzed !!!
             k = k - 1
          END DO
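Apart from the header update, every change in this file is the systematic rename announced in the log message: coarse_bound becomes parent_bound, and the local bound variables icl, icr, jcs, jcn become ipl, ipr, jps, jpn. A minimal sketch (not part of the changeset) of the resulting convention; the inline comments are illustrative, and parent_bound itself is filled on the child side in pmc_interface_mod.f90 as shown in the first file's diff:

   !-- Child-domain index bounds in the parent-grid index space:
   !-- parent_bound(1:4) holds ipl, ipr, jps, jpn; parent_bound(5) holds myid.
      ipl = parent_bound(1)   ! left
      ipr = parent_bound(2)   ! right
      jps = parent_bound(3)   ! south
      jpn = parent_bound(4)   ! north
   !
   !-- The per-parent-cell particle containers are allocated over exactly this
   !-- rectangle, so any (jc,ic) with jps <= jc <= jpn and ipl <= ic <= ipr
   !-- indexes a valid box:
      ALLOCATE( coarse_particles(jps:jpn,ipl:ipr) )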