Changeset 3984 for palm/trunk/SOURCE/pmc_interface_mod.f90
- Timestamp: May 16, 2019 3:17:03 PM (6 years ago)
- File: 1 edited
Legend:
- Unmodified: no prefix
- Added: prefixed with '+'
- Removed: prefixed with '-'
palm/trunk/SOURCE/pmc_interface_mod.f90 (r3979 → r3984)

  ! -----------------
  ! $Id$
+ ! Commenting improved, pmci_map_fine_to_coarse_grid renamed as pmci_map_child_grid_to_parent_grid,
+ ! set_child_edge_coords renamed as pmci_set_child_edge_coords, some variables renamed, etc.
+ !
+ ! 3979 2019-05-15 13:54:29Z hellstea
  ! Bugfix in pmc_interp_1sto_sn. This bug had effect only in case of 1-d domain
  ! decomposition with npex = 1.
…
  !
  !-- Constants
- INTEGER(iwp), PARAMETER ::  child_to_parent = 2  !<
- INTEGER(iwp), PARAMETER ::  parent_to_child = 1  !<
+ INTEGER(iwp), PARAMETER ::  child_to_parent = 2  !< Parameter for pmci_parent_datatrans indicating the direction of transfer
+ INTEGER(iwp), PARAMETER ::  parent_to_child = 1  !< Parameter for pmci_parent_datatrans indicating the direction of transfer
  INTEGER(iwp), PARAMETER ::  interpolation_scheme_lrsn = 2  !< Interpolation scheme to be used on lateral boundaries
  INTEGER(iwp), PARAMETER ::  interpolation_scheme_t    = 3  !< Interpolation scheme to be used on top boundary
  !
  !-- Coupler setup
- INTEGER(iwp), SAVE ::  comm_world_nesting  !<
- INTEGER(iwp), SAVE ::  cpl_id = 1          !<
+ INTEGER(iwp), SAVE ::  comm_world_nesting  !< Global nesting communicator
+ INTEGER(iwp), SAVE ::  cpl_id = 1          !<
  INTEGER(iwp), SAVE ::  cpl_npe_total       !<
  INTEGER(iwp), SAVE ::  cpl_parent_id       !<
…
  !
  !-- Geometry
- REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_x  !<
- REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_y  !<
- REAL(wp), SAVE, PUBLIC ::  lower_left_coord_x  !<
- REAL(wp), SAVE, PUBLIC ::  lower_left_coord_y  !<
+ REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_x  !< Array for the absolute x-coordinates
+ REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_y  !< Array for the absolute y-coordinates
+ REAL(wp), SAVE, PUBLIC ::  lower_left_coord_x  !< x-coordinate of the lower left corner of the domain
+ REAL(wp), SAVE, PUBLIC ::  lower_left_coord_y  !< y-coordinate of the lower left corner of the domain
  !
  !-- Children's parent-grid arrays
…
  REAL(wp), DIMENSION(7) ::  parent_grid_info_real  !<
- REAL(wp), DIMENSION(2) ::  zmax_coarse  !<

  TYPE parentgrid_def
…
  !
  !-- Initialize the child (must be called before pmc_setup_parent)
- !  EXTEND THIS COMMENT EXPLAINEIN WHY IT MUST BE CALLED BEFORE
+ !-- Klaus, extend this comment to explain why it must be called before
  CALL pmci_setup_child   ! CONTAIN THIS
  !
…
  ENDIF
  ENDDO
- zmax_coarse = child_grid_info(1:2)
  !
  !-- Get absolute coordinates from the child
…
  ENDIF

- CALL set_child_edge_coords
+ CALL pmci_set_child_edge_coords

  DEALLOCATE( child_coord_x )
…
  CALL pmc_send_to_child( child_id, rans_mode, 1, 0, 19, ierr )
  !
- !-- Send coarse grid information to child
+ !-- Send parent grid information to child
  CALL pmc_send_to_child( child_id, parent_grid_info_real,               &
                          SIZE( parent_grid_info_real ), 0, 21,          &
…

- SUBROUTINE set_child_edge_coords
+ SUBROUTINE pmci_set_child_edge_coords
  IMPLICIT NONE

- INTEGER(iwp) ::  nbgp_lpm = 1  !<
+ INTEGER(iwp) ::  nbgp_lpm = 1  !< Number of ghost-point layers used for lpm (Klaus, is this correct?)
…
  childgrid(m)%ny_coord   = child_coord_y(ny_child) + dy_child
  childgrid(m)%ny_coord_b = child_coord_y(ny_child+nbgp_lpm) + dy_child
- childgrid(m)%uz_coord   = zmax_coarse(2)
- childgrid(m)%uz_coord_b = zmax_coarse(1)
-
- END SUBROUTINE set_child_edge_coords
+ childgrid(m)%uz_coord   = child_grid_info(2)
+ childgrid(m)%uz_coord_b = child_grid_info(1)
+
+ END SUBROUTINE pmci_set_child_edge_coords

  #endif
…
  INTEGER(iwp) ::  lb  !< Running index for aerosol size bins
  INTEGER(iwp) ::  lc  !< Running index for aerosol mass bins
- INTEGER(iwp) ::  lg  !< Running index for salsa gases
+ INTEGER(iwp) ::  lg  !< Running index for SALSA gases
  INTEGER(iwp) ::  n   !< Running index for number of chemical species
  INTEGER(iwp), DIMENSION(3) ::  child_grid_dim   !< Array for sending the child-grid dimensions to parent

  REAL(wp), DIMENSION(5) ::  child_grid_info  !< Array for sending the child-grid spacings etc to parent

- CHARACTER( LEN=da_namelen ) ::  myname      !<
- CHARACTER(LEN=5)            ::  salsa_char  !<
+ CHARACTER( LEN=da_namelen ) ::  myname      !< Name of the variable to be coupled
+ CHARACTER(LEN=5)            ::  salsa_char  !< Name extension for the variable name in case of SALSA variable
…
  IF ( .NOT. pmc_is_rootmodel() )  THEN
  !
- !-- ADD A DESCRIPTION HERE WHAT PMC_CHILDINIT DOES
+ !-- KLaus, add a description here what pmc_childinit does
  CALL pmc_childinit
  !
…
  !-- pmci_set_array_pointer (for parent arrays)
  !-- pmci_create_childs_parent_grid_arrays (for child's parent-grid arrays)
- CALL pmc_set_dataarray_name( 'coarse', 'u' ,'fine', 'u', ierr )
- CALL pmc_set_dataarray_name( 'coarse', 'v' ,'fine', 'v', ierr )
- CALL pmc_set_dataarray_name( 'coarse', 'w' ,'fine', 'w', ierr )
+ CALL pmc_set_dataarray_name( 'parent', 'u', 'child', 'u', ierr )
+ CALL pmc_set_dataarray_name( 'parent', 'v', 'child', 'v', ierr )
+ CALL pmc_set_dataarray_name( 'parent', 'w', 'child', 'w', ierr )
  !
  !-- Set data array name for TKE. Please note, nesting of TKE is actually
…
  !-- design of model coupler, however, data array names must be already
  !-- available at this point.
- IF ( ( rans_mode_parent  .AND.  rans_mode )  .OR.                    &
-      ( .NOT. rans_mode_parent  .AND.  .NOT. rans_mode  .AND.         &
+ IF ( ( rans_mode_parent  .AND.  rans_mode )  .OR.                       &
+      ( .NOT. rans_mode_parent  .AND.  .NOT. rans_mode  .AND.            &
         .NOT. constant_diffusion ) )  THEN
-    CALL pmc_set_dataarray_name( 'coarse', 'e' ,'fine', 'e', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'e', 'child', 'e', ierr )
  ENDIF
…
  !-- above.
  IF ( rans_mode_parent  .AND.  rans_mode  .AND.  rans_tke_e )  THEN
-    CALL pmc_set_dataarray_name( 'coarse', 'diss' ,'fine', 'diss', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'diss', 'child', 'diss', ierr )
  ENDIF

  IF ( .NOT. neutral )  THEN
-    CALL pmc_set_dataarray_name( 'coarse', 'pt' ,'fine', 'pt', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'pt' ,'child', 'pt', ierr )
  ENDIF

  IF ( humidity )  THEN

-    CALL pmc_set_dataarray_name( 'coarse', 'q' ,'fine', 'q', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'q', 'child', 'q', ierr )

     IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
-       CALL pmc_set_dataarray_name( 'coarse', 'qc' ,'fine', 'qc', ierr )
-       CALL pmc_set_dataarray_name( 'coarse', 'nc' ,'fine', 'nc', ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'qc', 'child', 'qc', ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'nc', 'child', 'nc', ierr )
     ENDIF

     IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
-       CALL pmc_set_dataarray_name( 'coarse', 'qr' ,'fine', 'qr', ierr )
-       CALL pmc_set_dataarray_name( 'coarse', 'nr' ,'fine', 'nr', ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'qr', 'child', 'qr', ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'nr', 'child', 'nr', ierr )
     ENDIF
…
  IF ( passive_scalar )  THEN
-    CALL pmc_set_dataarray_name( 'coarse', 's' ,'fine', 's', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 's', 'child', 's', ierr )
  ENDIF

  IF ( particle_advection )  THEN
-    CALL pmc_set_dataarray_name( 'coarse', 'nr_part' ,'fine',              &
-                                 'nr_part', ierr )
-    CALL pmc_set_dataarray_name( 'coarse', 'part_adr' ,'fine',             &
-                                 'part_adr', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'nr_part', 'child', 'nr_part', ierr )
+    CALL pmc_set_dataarray_name( 'parent', 'part_adr', 'child', 'part_adr', ierr )
  ENDIF

  IF ( air_chemistry  .AND.  nest_chemistry )  THEN
     DO  n = 1, nspec
-       CALL pmc_set_dataarray_name( 'coarse',                              &
-                                    'chem_' //                             &
-                                    TRIM( chem_species(n)%name ),          &
-                                    'fine',                                &
-                                    'chem_' //                             &
-                                    TRIM( chem_species(n)%name ),          &
-                                    ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'chem_' // TRIM( chem_species(n)%name ),    &
+                                    'child',  'chem_' // TRIM( chem_species(n)%name ), ierr )
     ENDDO
  ENDIF
…
     DO  lb = 1, nbins_aerosol
        WRITE(salsa_char,'(i0)') lb
-       CALL pmc_set_dataarray_name( 'coarse',                              &
-                                    'an_' //                               &
-                                    TRIM( salsa_char ),                    &
-                                    'fine',                                &
-                                    'an_' //                               &
-                                    TRIM( salsa_char ),                    &
-                                    ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'an_' // TRIM( salsa_char ),                &
+                                    'child',  'an_' // TRIM( salsa_char ), ierr )
     ENDDO
     DO  lc = 1, nbins_aerosol * ncomponents_mass
        WRITE(salsa_char,'(i0)') lc
-       CALL pmc_set_dataarray_name( 'coarse',                              &
-                                    'am_' //                               &
-                                    TRIM( salsa_char ),                    &
-                                    'fine',                                &
-                                    'am_' //                               &
-                                    TRIM( salsa_char ),                    &
-                                    ierr )
+       CALL pmc_set_dataarray_name( 'parent', 'am_' // TRIM( salsa_char ),                &
+                                    'child',  'am_' // TRIM( salsa_char ), ierr )
     ENDDO
     IF ( .NOT. salsa_gases_from_chem )  THEN
        DO  lg = 1, ngases_salsa
           WRITE(salsa_char,'(i0)') lg
-          CALL pmc_set_dataarray_name( 'coarse',                           &
-                                       'sg_' //                            &
-                                       TRIM( salsa_char ),                 &
-                                       'fine',                             &
-                                       'sg_' //                            &
-                                       TRIM( salsa_char ),                 &
-                                       ierr )
+          CALL pmc_set_dataarray_name( 'parent', 'sg_' // TRIM( salsa_char ),             &
+                                       'child',  'sg_' // TRIM( salsa_char ), ierr )
        ENDDO
     ENDIF
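Aside, not part of the changeset: every registration above composes the coupled-variable name from a fixed prefix ('chem_', 'an_', 'am_', 'sg_') plus either a chemical-species name or a bin index written with the (i0) format, and the child setup later recognises the variable class again via INDEX(). A minimal, self-contained Fortran sketch of that string handling; the species list, the bin count and the printed text are invented for illustration:

PROGRAM coupling_name_sketch
   IMPLICIT NONE
   CHARACTER(LEN=5)  ::  salsa_char  !< Bin index written as text, as in the registrations above
   CHARACTER(LEN=20) ::  myname      !< Composed coupled-variable name
   CHARACTER(LEN=10), DIMENSION(2) ::  species = (/ CHARACTER(LEN=10) :: 'NO', 'NO2' /)  !< Invented species list
   INTEGER ::  lb  !< Running index for aerosol number bins
   INTEGER ::  n   !< Running index for chemical species
!
!-- Chemistry: prefix 'chem_' plus the species name; INDEX() is what the child setup
!-- uses to recognise a chemistry variable again
   DO  n = 1, SIZE( species )
      myname = 'chem_' // TRIM( species(n) )
      PRINT *, 'register parent/child pair: ', TRIM( myname ),                            &
               '  chemistry? ', INDEX( TRIM( myname ), 'chem_' ) /= 0
   ENDDO
!
!-- SALSA aerosol number bins: prefix 'an_' plus the bin index written with (i0)
   DO  lb = 1, 3
      WRITE( salsa_char, '(i0)' )  lb
      myname = 'an_' // TRIM( salsa_char )
      PRINT *, 'register parent/child pair: ', TRIM( myname )
   ENDDO
END PROGRAM coupling_name_sketch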
…
  CALL MPI_BCAST( rans_mode_parent, 1, MPI_LOGICAL, 0, comm2d, ierr )
  !
- !-- Find the index bounds for the nest domain in the coarse-grid index space
- CALL pmci_map_fine_to_coarse_grid
+ !-- Find the index bounds for the nest domain in the parent-grid index space
+ CALL pmci_map_child_grid_to_parent_grid
  !
  !-- TO_DO: Klaus give a comment what is happening here
…
  DO  WHILE ( pmc_c_getnextarray( myname ) )
  !
- !-- Note that cg%nz is not the original nz of parent, but the highest
+ !-- Note that pg%nz is not the original nz of parent, but the highest
  !-- parent-grid level needed for nesting.
- !-- Please note, in case of chemical species an additional parameter
- !-- need to be passed, which is required to set the pointer correctly
- !-- to the chemical-species data structure. Hence, first check if current
- !-- variable is a chemical species. If so, pass index id of respective
- !-- species and increment this subsequently.
+ !-- Note that in case of chemical species or SALSA variables an additional
+ !-- parameter needs to be passed. The parameter is required to set the pointer
+ !-- correctly to the chemical-species or SALSA data structure. Hence, first check if
+ !-- the current variable is a chemical species or a SALSA variable. If so, pass
+ !-- index id of respective sub-variable (species or bin) and increment this subsequently.
  IF ( INDEX( TRIM( myname ), 'chem_' ) /= 0 )  THEN
     CALL pmci_create_childs_parent_grid_arrays ( myname, ipl, ipr, jps, jpn, pg%nz, n )
…

- SUBROUTINE pmci_map_fine_to_coarse_grid
- !
- !-- Determine index bounds of interpolation/anterpolation area in the coarse
- !-- grid index space
+ SUBROUTINE pmci_map_child_grid_to_parent_grid
+ !
+ !-- Determine index bounds of interpolation/anterpolation area in the parent-grid index space
  IMPLICIT NONE

  INTEGER(iwp), DIMENSION(4) ::  parent_bound_global  !< Transfer array for global parent-grid index bounds
- INTEGER(iwp), DIMENSION(2) ::  size_of_array  !<
-
- INTEGER(iwp) ::  i      !<
- INTEGER(iwp) ::  iauxl  !<
- INTEGER(iwp) ::  iauxr  !<
- INTEGER(iwp) ::  ijaux  !<
- INTEGER(iwp) ::  j      !<
- INTEGER(iwp) ::  jauxs  !<
- INTEGER(iwp) ::  jauxn  !<
+ INTEGER(iwp), DIMENSION(2) ::  size_of_array  !< For sending the dimensions of parent_bound_all to parent
+
+ INTEGER(iwp) ::  ip     !< Running parent-grid index in the x-direction
+ INTEGER(iwp) ::  iauxl  !< Offset between the index bound ipl and the auxiliary index bound ipla
+ INTEGER(iwp) ::  iauxr  !< Offset between the index bound ipr and the auxiliary index bound ipra
+ INTEGER(iwp) ::  ijaux  !< Temporary variable for receiving the index bound from the neighbouring subdomain
+ INTEGER(iwp) ::  jp     !< Running parent-grid index in the y-direction
+ INTEGER(iwp) ::  jauxs  !< Offset between the index bound jps and the auxiliary index bound jpsa
+ INTEGER(iwp) ::  jauxn  !< Offset between the index bound jpn and the auxiliary index bound jpna

  REAL(wp) ::  xexl  !< Parent-grid array exceedance behind the left edge of the child PE subdomain
…
  REAL(wp) ::  yexs  !< Parent-grid array exceedance behind the south edge of the child PE subdomain
  REAL(wp) ::  yexn  !< Parent-grid array exceedance behind the north edge of the child PE subdomain
- REAL(wp) ::  xcs   !< RENAME
- REAL(wp) ::  xce   !< RENAME
- REAL(wp) ::  ycs   !< RENAME
- REAL(wp) ::  yce   !< RENAME
-
- !
- !-- Determine the anterpolation index limits. If at least half of the
- !-- parent-grid cell is within the current child sub-domain, then it
- !-- is included in the current sub-domain's anterpolation domain.
- !-- Else the parent-grid cell is included in the neighbouring subdomain's
- !-- anterpolation domain, or not included at all if we are at the outer
- !-- edge of the child domain. This may occur especially when a large grid-spacing
- !-- ratio is used.
- !
- !-- Left
- !-- EXPLAIN THE EXTENSION HERE AND IN THE OTHER OCCASIONS (r, s, n)
+ REAL(wp) ::  xpl   !< Requested left-edge x-coordinate of the parent-grid array domain (at the internal boundaries
+                    !< the real edge may differ from this in some cases as explained in the comment block below)
+ REAL(wp) ::  xpr   !< Requested right-edge x-coordinate of the parent-grid array domain (at the internal boundaries
+                    !< the real edge may differ from this in some cases as explained in the comment block below)
+ REAL(wp) ::  yps   !< Requested south-edge y-coordinate of the parent-grid array domain (at the internal boundaries
+                    !< the real edge may differ from this in some cases as explained in the comment block below)
+ REAL(wp) ::  ypn   !< Requested south-edge y-coordinate of the parent-grid array domain (at the internal boundaries
+                    !< the real edge may differ from this in some cases as explained in the comment block below)
+
+ !
+ !-- Determine the index limits for the child's parent-grid arrays (such as uc for example).
+ !-- Note that at the outer edges of the child domain (nest boundaries) these arrays exceed
+ !-- the boundary by two parent-grid cells. At the internal boundaries, there are no
+ !-- exceedances and thus no overlaps with the neighbouring subdomain. If at least half
+ !-- of the parent-grid cell is within the current child sub-domain, then it is included
+ !-- in the current sub-domain's parent-grid array. Else the parent-grid cell is
+ !-- included in the neighbouring subdomain's parent-grid array, or not included at all if
+ !-- we are at the outer edge of the child domain. This may occur especially when a large
+ !-- grid-spacing ratio is used.
+ !
+ !-- Left boundary.
+ !-- Extension by two parent-grid cells behind the boundary, see the comment block above.
  IF ( bc_dirichlet_l )  THEN
-    xexl  = 2 * pg%dx
+    xexl  = 2.0_wp * pg%dx
     iauxl = 0
  ELSE
…
     iauxl = 1
  ENDIF
- xcs = coord_x(nxl) - xexl
- DO  i = 0, pg%nx
-    IF ( pg%coord_x(i) + 0.5_wp * pg%dx >= xcs )  THEN   ! Consider changing >= to ==
-       ipl = MAX( 0, i )
+ xpl = coord_x(nxl) - xexl
+ DO  ip = 0, pg%nx
+    IF ( pg%coord_x(ip) + 0.5_wp * pg%dx >= xpl )  THEN
+       ipl = MAX( 0, ip )
        EXIT
     ENDIF
  ENDDO
  !
- !-- Right
+ !-- Right boundary.
+ !-- Extension by two parent-grid cells behind the boundary, see the comment block above.
  IF ( bc_dirichlet_r )  THEN
-    xexr  = 2 * pg%dx
+    xexr  = 2.0_wp * pg%dx
     iauxr = 0
  ELSE
…
     iauxr = 1
  ENDIF
- xce = coord_x(nxr+1) + xexr
- DO  i = pg%nx, 0 , -1
-    IF ( pg%coord_x(i) + 0.5_wp * pg%dx <= xce )  THEN
-       ipr = MIN( pg%nx, MAX( ipl, i ) )
+ xpr = coord_x(nxr+1) + xexr
+ DO  ip = pg%nx, 0 , -1
+    IF ( pg%coord_x(ip) + 0.5_wp * pg%dx <= xpr )  THEN
+       ipr = MIN( pg%nx, MAX( ipl, ip ) )
        EXIT
     ENDIF
  ENDDO
  !
- !-- South
+ !-- South boundary.
+ !-- Extension by two parent-grid cells behind the boundary, see the comment block above.
  IF ( bc_dirichlet_s )  THEN
-    yexs  = 2 * pg%dy
+    yexs  = 2.0_wp * pg%dy
     jauxs = 0
  ELSE
…
     jauxs = 1
  ENDIF
- ycs = coord_y(nys) - yexs
- DO  j = 0, pg%ny
-    IF ( pg%coord_y(j) + 0.5_wp * pg%dy >= ycs )  THEN
-       jps = MAX( 0, j )
+ yps = coord_y(nys) - yexs
+ DO  jp = 0, pg%ny
+    IF ( pg%coord_y(jp) + 0.5_wp * pg%dy >= yps )  THEN
+       jps = MAX( 0, jp )
        EXIT
     ENDIF
  ENDDO
  !
- !-- North
+ !-- North boundary.
+ !-- Extension by two parent-grid cells behind the boundary, see the comment block above.
  IF ( bc_dirichlet_n )  THEN
-    yexn  = 2 * pg%dy
+    yexn  = 2.0_wp * pg%dy
     jauxn = 0
  ELSE
…
     jauxn = 1
  ENDIF
- yce = coord_y(nyn+1) + yexn
- DO  j = pg%ny, 0 , -1
-    IF ( pg%coord_y(j) + 0.5_wp * pg%dy <= yce )  THEN
-       jpn = MIN( pg%ny, MAX( jps, j ) )
+ ypn = coord_y(nyn+1) + yexn
+ DO  jp = pg%ny, 0 , -1
+    IF ( pg%coord_y(jp) + 0.5_wp * pg%dy <= ypn )  THEN
+       jpn = MIN( pg%ny, MAX( jps, jp ) )
        EXIT
     ENDIF
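Aside, not part of the changeset: the half-cell criterion described in the comment block above (a parent-grid cell is taken into the subdomain's parent-grid array if at least half of it lies inside the extended child edge) can be exercised in isolation. The following self-contained sketch finds the left index bound only; the grid spacing, parent-grid size and child-edge coordinate are invented sample values:

PROGRAM half_cell_criterion_sketch
   IMPLICIT NONE
   INTEGER, PARAMETER  ::  wp  = KIND( 1.0d0 )   !< Working precision, stands in for PALM's wp
   INTEGER, PARAMETER  ::  nxp = 20              !< Last parent-grid index (plays the role of pg%nx)
   REAL(wp), PARAMETER ::  dxp = 10.0_wp         !< Parent-grid spacing (plays the role of pg%dx)
   REAL(wp), DIMENSION(0:nxp) ::  coord_xp       !< Parent-grid x-coordinates (plays the role of pg%coord_x)
   REAL(wp) ::  x_child_left = 37.0_wp           !< Left edge of the child subdomain (coord_x(nxl)), invented value
   REAL(wp) ::  xexl                             !< Extension behind an outer (nest) boundary
   REAL(wp) ::  xpl                              !< Requested left edge of the parent-grid array domain
   INTEGER  ::  ip                               !< Running parent-grid index
   INTEGER  ::  ipl                              !< Resulting left index bound

   DO  ip = 0, nxp
      coord_xp(ip) = REAL( ip, wp ) * dxp
   ENDDO
!
!-- Outer nest boundary: extend the requested domain by two parent-grid cells
   xexl = 2.0_wp * dxp
   xpl  = x_child_left - xexl
!
!-- A parent-grid cell is included if at least half of it lies right of the requested edge
   ipl = 0
   DO  ip = 0, nxp
      IF ( coord_xp(ip) + 0.5_wp * dxp >= xpl )  THEN
         ipl = MAX( 0, ip )
         EXIT
      ENDIF
   ENDDO
   PRINT *, 'left parent-grid index bound ipl = ', ipl
END PROGRAM half_cell_criterion_sketch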
…
  !-- This is a safety measure mainly for cases with high grid-spacing
  !-- ratio and narrow child subdomains.
- IF ( nxl == 0 )  THEN
-    CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
- ELSE IF ( nxr == nx )  THEN
-    CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
-    ipl = ijaux + 1
- ELSE
-    CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
-    CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
-    ipl = ijaux + 1
- ENDIF
- IF ( nys == 0 )  THEN
-    CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
- ELSE IF ( nyn == ny )  THEN
-    CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
-    jps = ijaux + 1
- ELSE
-    CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
-    CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
-    jps = ijaux + 1
- ENDIF
-
- WRITE(9,"('Pmci_map_fine_to_coarse_grid. Parent-grid array bounds: ',4(i4,2x))") &
+ IF ( pdims(1) > 1 )  THEN
+    IF ( nxl == 0 )  THEN
+       CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
+    ELSE IF ( nxr == nx )  THEN
+       CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
+       ipl = ijaux + 1
+    ELSE
+       CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
+       CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
+       ipl = ijaux + 1
+    ENDIF
+ ENDIF
+
+ IF ( pdims(2) > 1 )  THEN
+    IF ( nys == 0 )  THEN
+       CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
+    ELSE IF ( nyn == ny )  THEN
+       CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
+       jps = ijaux + 1
+    ELSE
+       CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
+       CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
+       jps = ijaux + 1
+    ENDIF
+ ENDIF
+
+ WRITE(9,"('pmci_map_child_grid_to_parent_grid. Parent-grid array bounds: ',4(i4,2x))") &
       ipl, ipr, jps, jpn
  FLUSH(9)
…
  parent_bound(5) = myid
  !
- !-- The following index bounds are used for allocating index mapping and some other auxiliary arrays
+ !-- The following auxiliary index bounds are used for allocating index mapping and
+ !-- some other auxiliary arrays.
  ipla = ipl - iauxl
  ipra = ipr + iauxr
  jpsa = jps - jauxs
  jpna = jpn + jauxn
  !
+ !-- The index-bounds parent_bound of all subdomains of the current child domain
+ !-- must be sent to the parent in order for the parent to create the index list.
+ !-- For this reason, the parent_bound arrays are packed together in single
+ !-- array parent_bound_all using MPI_GATHER.
  !-- Note that MPI_Gather receives data from all processes in the rank order
- !-- This fact is exploited in creating the index list in pmci_create_index_list
- !  IMPROVE THIS COMMENT. EXPLAIN WHERE THIS INFORMATION IS NEEDED.
+ !-- This fact is exploited in creating the index list in pmci_create_index_list.
  CALL MPI_GATHER( parent_bound, 5, MPI_INTEGER, parent_bound_all, 5,            &
                   MPI_INTEGER, 0, comm2d, ierr )
…
  ENDIF
  !
- !-- Broadcat the global parent-grid index bounds to all current child processes
+ !-- Broadcast the global parent-grid index bounds to all current child processes
  CALL MPI_BCAST( parent_bound_global, 4, MPI_INTEGER, 0, comm2d, ierr )
  iplg = parent_bound_global(1)
  iprg = parent_bound_global(2)
  jpsg = parent_bound_global(3)
  jpng = parent_bound_global(4)
- WRITE( 9, "('Pmci_map_fine_to_coarse_grid. Global parent-grid index bounds iplg, iprg, jpsg, jpng: ',4(i4,2x))" ) &
+ WRITE( 9, "('pmci_map_child_grid_to_parent_grid. Global parent-grid index bounds iplg, iprg, jpsg, jpng: ',4(i4,2x))" ) &
       iplg, iprg, jpsg, jpng
  FLUSH( 9 )

- END SUBROUTINE pmci_map_fine_to_coarse_grid
+ END SUBROUTINE pmci_map_child_grid_to_parent_grid
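Aside, not part of the changeset: the added pdims(1) > 1 and pdims(2) > 1 guards restrict the bound-matching exchange to directions that are actually decomposed, so no send or receive is posted when a direction has only one process. A minimal MPI sketch of the same send/receive pattern, with invented bounds and plain MPI_COMM_WORLD ranks standing in for pright/pleft and comm2d (compile with an MPI Fortran wrapper such as mpif90):

PROGRAM contiguous_bounds_sketch
   USE mpi
   IMPLICIT NONE
   INTEGER ::  ierr                     !< MPI error code
   INTEGER ::  myid                     !< Rank in MPI_COMM_WORLD
   INTEGER ::  npes                     !< Number of processes
   INTEGER ::  status(MPI_STATUS_SIZE)  !< Status of the receive
   INTEGER ::  lo, hi                   !< This rank's index bounds (invented, deliberately overlapping)
   INTEGER ::  hi_left                  !< Right bound received from the left neighbour

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, npes, ierr )
   lo = 3 * myid
   hi = 3 * myid + 4
!
!-- Only exchange if there is more than one process in this direction; this corresponds
!-- to the pdims(1) > 1 / pdims(2) > 1 guards added in the changeset. Like the code above,
!-- middle ranks post the (small, eagerly buffered) send before the receive.
   IF ( npes > 1 )  THEN
      IF ( myid == 0 )  THEN
         CALL MPI_SEND( hi, 1, MPI_INTEGER, myid+1, 717, MPI_COMM_WORLD, ierr )
      ELSE IF ( myid == npes-1 )  THEN
         CALL MPI_RECV( hi_left, 1, MPI_INTEGER, myid-1, 717, MPI_COMM_WORLD, status, ierr )
         lo = hi_left + 1
      ELSE
         CALL MPI_SEND( hi, 1, MPI_INTEGER, myid+1, 717, MPI_COMM_WORLD, ierr )
         CALL MPI_RECV( hi_left, 1, MPI_INTEGER, myid-1, 717, MPI_COMM_WORLD, status, ierr )
         lo = hi_left + 1
      ENDIF
   ENDIF
   PRINT *, 'rank ', myid, ' contiguous bounds: ', lo, hi
   CALL MPI_FINALIZE( ierr )
END PROGRAM contiguous_bounds_sketch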
…
  !
  !-- First determine kcto and kctw which refer to the uppermost
- !-- coarse-grid levels below the child top-boundary level.
+ !-- parent-grid levels below the child top-boundary level.
  kk = 0
  DO  WHILE ( pg%zu(kk) <= zu(nzt) )
…
  WRITE( 9, * )
  !
- !-- Precomputation of number of fine-grid nodes inside parent-grid cells.
+ !-- Precomputation of number of child-grid nodes inside parent-grid cells.
  !-- Note that ii, jj, and kk are parent-grid indices.
  !-- This information is needed in the anterpolation.
…
  !-- Error
  WRITE( message_string, * ) 'child domain too narrow for anterpolation in x-direction'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0652', 3, 2, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
  ELSE IF ( iprg - iplg + 1 < 11 )  THEN
  !
  !-- Warning
  WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0653', 0, 1, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
  anterpolation_buffer_width = 0
  ELSE
…
  !
  !-- Informative message
  WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to default value 2'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0654', 0, 0, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
  anterpolation_buffer_width = 2
  ENDIF
…
  !-- Error
  WRITE( message_string, * ) 'child domain too narrow for anterpolation in y-direction'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0652', 3, 2, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
  ELSE IF ( jpng - jpsg + 1 < 11 )  THEN
  !
  !-- Warning
  WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0653', 0, 1, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
  anterpolation_buffer_width = 0
  ELSE
  !
  !-- Informative message
  WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to default value 2'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0654', 0, 0, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
  anterpolation_buffer_width = 2
  ENDIF
…
  !-- Error
  WRITE( message_string, * ) 'child domain too shallow for anterpolation in z-direction'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0652', 3, 2, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
  ELSE IF ( kctw - 3 < 1 )  THEN
  !
  !-- Warning
  WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0653', 0, 1, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
  anterpolation_buffer_width = 0
  ELSE
…
  !
  !-- Informative message
- CALL message( 'pmci_map_fine_to_coarse_grid', 'PA0654', 0, 0, 0, 6, 0 )
+ CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
  anterpolation_buffer_width = 2
  ENDIF