Timestamp: Mar 30, 2011 9:31:40 AM
Author: raasch
Message: formatting adjustments

File: 1 edited

  • palm/trunk/SOURCE/init_pegrid.f90

    r708 r709  
    44! Current revisions:
    55! -----------------
    6 !
     6! formatting adjustments
    77!
    88! ATTENTION: nnz_x undefined problem still has to be solved!!!!!!!!
     
    577577
    578578          CALL MPI_OPEN_PORT( MPI_INFO_NULL, port_name, ierr )
    579 !
    580 !--       TEST OUTPUT (TO BE REMOVED)
    581           WRITE(9,*)  TRIM( coupling_mode ),  &
    582                ', ierr after MPI_OPEN_PORT: ', ierr
    583           CALL LOCAL_FLUSH( 9 )
    584579
    585580          CALL MPI_PUBLISH_NAME( 'palm_coupler', MPI_INFO_NULL, port_name, &
    586581                                 ierr )
    587 !
    588 !--       TEST OUTPUT (TO BE REMOVED)
    589           WRITE(9,*)  TRIM( coupling_mode ),  &
    590                ', ierr after MPI_PUBLISH_NAME: ', ierr
    591           CALL LOCAL_FLUSH( 9 )
    592582
    593583!
     
    614604
    615605          CALL MPI_LOOKUP_NAME( 'palm_coupler', MPI_INFO_NULL, port_name, ierr )
    616 !
    617 !--       TEST OUTPUT (TO BE REMOVED)
    618           WRITE(9,*)  TRIM( coupling_mode ),  &
    619                ', ierr after MPI_LOOKUP_NAME: ', ierr
    620           CALL LOCAL_FLUSH( 9 )
    621 
    622606
    623607       ENDIF
     
    631615    IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
    632616
    633        PRINT*, '... before COMM_ACCEPT'
    634617       CALL MPI_COMM_ACCEPT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
    635618                             comm_inter, ierr )
    636        PRINT*, '--- ierr = ', ierr
    637        PRINT*, '--- comm_inter atmosphere = ', comm_inter
    638 
    639619       coupling_mode_remote = 'ocean_to_atmosphere'
    640620
    641621    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
    642622
    643        IF ( myid == 0 )  PRINT*, '*** read: ', port_name, '  ierr = ', ierr
    644        PRINT*, '... before COMM_CONNECT'
    645623       CALL MPI_COMM_CONNECT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
    646624                              comm_inter, ierr )
    647        PRINT*, '--- ierr = ', ierr
    648        PRINT*, '--- comm_inter ocean      = ', comm_inter
    649 
    650625       coupling_mode_remote = 'atmosphere_to_ocean'
    651626
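
Note: the two hunks above drop leftover test output (WRITE/LOCAL_FLUSH and PRINT* statements) around the MPI client/server handshake that connects the atmosphere and ocean executables. For reference, a minimal self-contained sketch of that handshake pattern is given below; it is illustrative only and not taken from init_pegrid.f90 (the service name 'my_coupler', the command-line role switch, and the clean-up calls are assumptions).

PROGRAM coupler_handshake

   USE mpi

   IMPLICIT NONE

   CHARACTER(LEN=MPI_MAX_PORT_NAME) ::  port_name = ' '
   CHARACTER(LEN=10)                ::  role
   INTEGER                          ::  comm_inter, ierr, myid

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL GET_COMMAND_ARGUMENT( 1, role )   !  'server' or 'client' (default: client)

   IF ( TRIM( role ) == 'server' )  THEN
!
!--   Server side: rank 0 opens a port and publishes it under a service name,
!--   then all ranks collectively wait for the partner job to connect
      IF ( myid == 0 )  THEN
         CALL MPI_OPEN_PORT( MPI_INFO_NULL, port_name, ierr )
         CALL MPI_PUBLISH_NAME( 'my_coupler', MPI_INFO_NULL, port_name, ierr )
      ENDIF
      CALL MPI_COMM_ACCEPT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
                            comm_inter, ierr )
      IF ( myid == 0 )  THEN
         CALL MPI_UNPUBLISH_NAME( 'my_coupler', MPI_INFO_NULL, port_name, ierr )
         CALL MPI_CLOSE_PORT( port_name, ierr )
      ENDIF
   ELSE
!
!--   Client side: rank 0 looks up the published port, then all ranks
!--   collectively connect to it
      IF ( myid == 0 )  THEN
         CALL MPI_LOOKUP_NAME( 'my_coupler', MPI_INFO_NULL, port_name, ierr )
      ENDIF
      CALL MPI_COMM_CONNECT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
                             comm_inter, ierr )
   ENDIF
!
!-- comm_inter is now an intercommunicator linking the two executables
   CALL MPI_COMM_DISCONNECT( comm_inter, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM coupler_handshake

Publishing and looking up a name across two separately started jobs generally also requires the MPI implementation to provide a common name service; how that is set up is implementation specific.
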
     
    654629
    655630!
    656 !-- Determine the number of ghost points
    657     IF (scalar_advec == 'ws-scheme' .OR. momentum_advec == 'ws-scheme') THEN
     631!-- Determine the number of ghost point layers
     632    IF ( scalar_advec == 'ws-scheme' .OR. momentum_advec == 'ws-scheme' ) THEN
    658633       nbgp = 3
    659634    ELSE
    660635       nbgp = 1
    661     END IF
    662 
    663 !
    664 !-- In case of coupled runs, create a new MPI derived datatype for the
    665 !-- exchange of surface (xy) data .
    666 !-- Gridpoint number for the exchange of ghost points (xy-plane)
    667 
     636    ENDIF
     637
     638!
     639!-- Create a new MPI derived datatype for the exchange of surface (xy) data,
     640!-- which is needed for coupled atmosphere-ocean runs.
     641!-- First, calculate number of grid points of an xy-plane.
    668642    ngp_xy  = ( nxr - nxl + 1 + 2 * nbgp ) * ( nyn - nys + 1 + 2 * nbgp )
    669 
    670 !
    671 !-- Define a new MPI derived datatype for the exchange of ghost points in
    672 !-- y-direction for 2D-arrays (line)
    673643    CALL MPI_TYPE_VECTOR( ngp_xy, 1, nzt-nzb+2, MPI_REAL, type_xy, ierr )
    674644    CALL MPI_TYPE_COMMIT( type_xy, ierr )
    675645
    676 
    677     IF ( TRIM( coupling_mode ) .NE. 'uncoupled' ) THEN
     646    IF ( TRIM( coupling_mode ) /= 'uncoupled' )  THEN
    678647   
    679648!
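
Note: the hunk above reformats the comment and the MPI_TYPE_VECTOR call defining type_xy, the derived datatype describing one horizontal (xy) surface of a 3-D array stored as (z,y,x), as needed for the coupled atmosphere-ocean data exchange. A minimal sketch of the same counting idea, with illustrative array sizes instead of the PALM index bounds, could look like this:

PROGRAM xy_surface_type

   USE mpi

   IMPLICIT NONE

   INTEGER, PARAMETER ::  nz = 4, ny = 6, nx = 6

   REAL, DIMENSION(nz,ny,nx) ::  a
   INTEGER ::  ierr, ngp_xy, type_xy

   CALL MPI_INIT( ierr )
!
!-- One element (blocklength 1) per vertical column, ny*nx columns in total,
!-- consecutive columns separated by the z-extent of the array (stride nz)
   ngp_xy = ny * nx
   CALL MPI_TYPE_VECTOR( ngp_xy, 1, nz, MPI_REAL, type_xy, ierr )
   CALL MPI_TYPE_COMMIT( type_xy, ierr )
!
!-- Sending "1" element of type_xy starting at a(k,1,1) transfers the complete
!-- horizontal slice at level k, e.g. (with a suitable communicator comm_inter
!-- and partner rank target_id):
!--    CALL MPI_SEND( a(k,1,1), 1, type_xy, target_id, 0, comm_inter, ierr )
   CALL MPI_TYPE_FREE( type_xy, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM xy_surface_type
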
     
    685654          ny_a = ny
    686655
    687           IF ( myid == 0 ) THEN
    688              CALL MPI_SEND( nx_a, 1, MPI_INTEGER, numprocs, 1, &
    689                             comm_inter, ierr )
    690              CALL MPI_SEND( ny_a, 1, MPI_INTEGER, numprocs, 2, &
    691                             comm_inter, ierr )
    692              CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 3, &
    693                             comm_inter, ierr )
    694              CALL MPI_RECV( nx_o, 1, MPI_INTEGER, numprocs, 4, &
    695                             comm_inter, status, ierr )
    696              CALL MPI_RECV( ny_o, 1, MPI_INTEGER, numprocs, 5, &
    697                             comm_inter, status, ierr )
    698              CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, numprocs, 6, &
     656          IF ( myid == 0 )  THEN
     657
     658             CALL MPI_SEND( nx_a, 1, MPI_INTEGER, numprocs, 1, comm_inter,  &
     659                            ierr )
     660             CALL MPI_SEND( ny_a, 1, MPI_INTEGER, numprocs, 2, comm_inter,  &
     661                            ierr )
     662             CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 3, comm_inter, &
     663                            ierr )
     664             CALL MPI_RECV( nx_o, 1, MPI_INTEGER, numprocs, 4, comm_inter,  &
     665                            status, ierr )
     666             CALL MPI_RECV( ny_o, 1, MPI_INTEGER, numprocs, 5, comm_inter,  &
     667                            status, ierr )
     668             CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, numprocs, 6,      &
    699669                            comm_inter, status, ierr )
    700670          ENDIF
    701671
    702           CALL MPI_BCAST( nx_o, 1, MPI_INTEGER, 0, comm2d, ierr)
    703           CALL MPI_BCAST( ny_o, 1, MPI_INTEGER, 0, comm2d, ierr)
    704           CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr)
     672          CALL MPI_BCAST( nx_o, 1, MPI_INTEGER, 0, comm2d, ierr )
     673          CALL MPI_BCAST( ny_o, 1, MPI_INTEGER, 0, comm2d, ierr )
     674          CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr )
    705675       
    706676       ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
     
    710680
    711681          IF ( myid == 0 ) THEN
    712              CALL MPI_RECV( nx_a, 1, MPI_INTEGER, 0, 1, &
    713                             comm_inter, status, ierr )
    714              CALL MPI_RECV( ny_a, 1, MPI_INTEGER, 0, 2, &
    715                             comm_inter, status, ierr )
    716              CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, 0, 3, &
    717                             comm_inter, status, ierr )
    718              CALL MPI_SEND( nx_o, 1, MPI_INTEGER, 0, 4, &
    719                             comm_inter, ierr )
    720              CALL MPI_SEND( ny_o, 1, MPI_INTEGER, 0, 5, &
    721                             comm_inter, ierr )
    722              CALL MPI_SEND( pdims, 2, MPI_INTEGER, 0, 6, &
    723                             comm_inter, ierr )
     682
     683             CALL MPI_RECV( nx_a, 1, MPI_INTEGER, 0, 1, comm_inter, status, &
     684                            ierr )
     685             CALL MPI_RECV( ny_a, 1, MPI_INTEGER, 0, 2, comm_inter, status, &
     686                            ierr )
     687             CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, 0, 3, comm_inter, &
     688                            status, ierr )
     689             CALL MPI_SEND( nx_o, 1, MPI_INTEGER, 0, 4, comm_inter, ierr )
     690             CALL MPI_SEND( ny_o, 1, MPI_INTEGER, 0, 5, comm_inter, ierr )
     691             CALL MPI_SEND( pdims, 2, MPI_INTEGER, 0, 6, comm_inter, ierr )
    724692          ENDIF
    725693
     
    730698       ENDIF
    731699 
    732        ngp_a = (nx_a+1+2*nbgp)*(ny_a+1+2*nbgp)
    733        ngp_o = (nx_o+1+2*nbgp)*(ny_o+1+2*nbgp)
    734 
    735 !
    736 !--    determine if the horizontal grid and the number of PEs
    737 !--    in ocean and atmosphere is same or not
    738 !--    (different number of PEs still not implemented)
    739        IF ( nx_o == nx_a .AND. ny_o == ny_a .AND.  &
     700       ngp_a = ( nx_a+1 + 2 * nbgp ) * ( ny_a+1 + 2 * nbgp )
     701       ngp_o = ( nx_o+1 + 2 * nbgp ) * ( ny_o+1 + 2 * nbgp )
     702
     703!
     704!--    Determine if the horizontal grid and the number of PEs in ocean and
     705!--    atmosphere is same or not
     706       IF ( nx_o == nx_a  .AND.  ny_o == ny_a  .AND.  &
    740707            pdims(1) == pdims_remote(1) .AND. pdims(2) == pdims_remote(2) ) &
    741708       THEN
     
    748715!--    Determine the target PEs for the exchange between ocean and
    749716!--    atmosphere (comm2d)
    750        IF ( coupling_topology == 0) THEN
    751           IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
     717       IF ( coupling_topology == 0 )  THEN
     718!
     719!--       In case of identical topologies, every atmosphere PE has exactly one
     720!--       ocean PE counterpart and vice versa
     721          IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' ) THEN
    752722             target_id = myid + numprocs
    753723          ELSE
     
    756726
    757727       ELSE
    758 
    759728!
    760729!--       In case of nonequivalent topology in ocean and atmosphere only for
    761730!--       PE0 in ocean and PE0 in atmosphere a target_id is needed, since
    762 !--       data echxchange between ocean and atmosphere will be done only by
    763 !--       those PEs.   
    764           IF ( myid == 0 ) THEN
    765              IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
     731!--       data echxchange between ocean and atmosphere will be done only
     732!--       between these PEs.   
     733          IF ( myid == 0 )  THEN
     734
     735             IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' )  THEN
    766736                target_id = numprocs
    767737             ELSE
    768738                target_id = 0
    769739             ENDIF
    770  print*, coupling_mode, myid, " -> ", target_id, "numprocs: ", numprocs
     740
    771741          ENDIF
     742
    772743       ENDIF
    773744
     
    861832!
    862833!--    Find out, if the total domain allows more levels. These additional
    863 !--    levels are processed on PE0 only.
     834!--    levels are identically processed on all PEs.
    864835       IF ( numprocs > 1  .AND.  mg_switch_to_pe0_level /= -1 )  THEN
     836
    865837          IF ( mg_levels_z > MIN( mg_levels_x, mg_levels_y ) )  THEN
     838
    866839             mg_switch_to_pe0_level_l = maximum_grid_level
    867840
     
    889862                mg_switch_to_pe0_level_l = 0
    890863             ENDIF
     864
    891865          ELSE
     866
    892867             mg_switch_to_pe0_level_l = 0
    893868             maximum_grid_level_l = maximum_grid_level
     869
    894870          ENDIF
    895871
     
    920896
    921897             ENDIF
     898
    922899          ENDIF
    923900
     
    939916!--          Save the grid size of the subdomain at the switch level, because
    940917!--          it is needed in poismg.
    941 !--          Array bounds of the local subdomain grids are gathered on PE0
    942918             ind(1) = nxl_l; ind(2) = nxr_l
    943919             ind(3) = nys_l; ind(4) = nyn_l
     
    953929             DEALLOCATE( ind_all )
    954930!
    955 !--          Calculate the grid size of the total domain gathered on PE0
     931!--          Calculate the grid size of the total domain
    956932             nxr_l = ( nxr_l-nxl_l+1 ) * pdims(1) - 1
    957933             nxl_l = 0
     
    1006982
    1007983!
    1008 !-- Define a new MPI derived datatype for the exchange of ghost points in
    1009 !-- y-direction for 2D-arrays (line)
    1010     CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_REAL, type_x, ierr )
     984!-- Define new MPI derived datatypes for the exchange of ghost points in
     985!-- x- and y-direction for 2D-arrays (line)
     986    CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_REAL, type_x, &
     987                          ierr )
    1011988    CALL MPI_TYPE_COMMIT( type_x, ierr )
    1012     CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_INTEGER, type_x_int, ierr )
     989    CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_INTEGER, &
     990                          type_x_int, ierr )
    1013991    CALL MPI_TYPE_COMMIT( type_x_int, ierr )
    1014992
     
    10291007
    10301008    nxl_l = nxl; nxr_l = nxr; nys_l = nys; nyn_l = nyn; nzb_l = nzb; nzt_l = nzt
     1009
    10311010!
    10321011!-- Discern between the model grid, which needs nbgp ghost points and
    10331012!-- grid levels for the multigrid scheme. In the latter case only one
    10341013!-- ghost point is necessary.
    1035 !-- First definition of mpi-vectors for exchange of ghost layers on normal
     1014!-- First definition of MPI-datatypes for exchange of ghost layers on normal
    10361015!-- grid. The following loop is needed for data exchange in poismg.f90.
    10371016!
    10381017!-- Determine number of grid points of yz-layer for exchange
    10391018    ngp_yz(0) = (nzt - nzb + 2) * (nyn - nys + 1 + 2 * nbgp)
    1040 !
    1041 !-- Define a new mpi datatype for the exchange of left - right boundaries.
    1042 !-- Indeed the data are connected in the physical memory and no mpi-vector
    1043 !-- is necessary, but the data exchange between left and right PE's using
    1044 !-- mpi-vectors is 10% faster than without.
     1019
     1020!
     1021!-- Define an MPI-datatype for the exchange of left/right boundaries.
     1022!-- Although data are contiguous in physical memory (which does not
     1023!-- necessarily require an MPI-derived datatype), the data exchange between
     1024!-- left and right PE's using the MPI-derived type is 10% faster than without.
    10451025    CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp*(nzt-nzb+2), ngp_yz(0), &
    1046                              MPI_REAL, type_xz(0), ierr )
     1026                          MPI_REAL, type_xz(0), ierr )
    10471027    CALL MPI_TYPE_COMMIT( type_xz(0), ierr )
    10481028
    1049     CALL MPI_TYPE_VECTOR( nbgp, ngp_yz(0), ngp_yz(0), MPI_REAL, type_yz(0), ierr)
     1029    CALL MPI_TYPE_VECTOR( nbgp, ngp_yz(0), ngp_yz(0), MPI_REAL, type_yz(0), &
     1030                          ierr )
    10501031    CALL MPI_TYPE_COMMIT( type_yz(0), ierr )
    1051 !
    1052 !-- Definition of mpi-vectors for multigrid
     1032
     1033!
     1034!-- Definition of MPI-datatypes for multigrid method (coarser level grids)
    10531035    IF ( psolver == 'multigrid' )  THEN
    10541036!   
    1055 !--   The definition of mpi-vectors as aforementioned, but only 1 ghost point is used.
    1056        DO i = maximum_grid_level, 1 , -1
     1037!--    Definition of MPI-datatyoe as above, but only 1 ghost level is used
     1038       DO  i = maximum_grid_level, 1 , -1
     1039
    10571040          ngp_yz(i) = (nzt_l - nzb_l + 2) * (nyn_l - nys_l + 3)
    10581041
    10591042          CALL MPI_TYPE_VECTOR( nxr_l-nxl_l+3, nzt_l-nzb_l+2, ngp_yz(i), &
    1060                              MPI_REAL, type_xz(i), ierr )
     1043                                MPI_REAL, type_xz(i), ierr )
    10611044          CALL MPI_TYPE_COMMIT( type_xz(i), ierr )
    10621045
    1063           CALL MPI_TYPE_VECTOR( 1, ngp_yz(i), ngp_yz(i), MPI_REAL, type_yz(i), ierr)
     1046          CALL MPI_TYPE_VECTOR( 1, ngp_yz(i), ngp_yz(i), MPI_REAL, type_yz(i), &
     1047                                ierr )
    10641048          CALL MPI_TYPE_COMMIT( type_yz(i), ierr )
    10651049
     
    10691053          nyn_l = nyn_l / 2
    10701054          nzt_l = nzt_l / 2
     1055
    10711056       ENDDO
    1072     END IF
     1057
     1058    ENDIF
    10731059#endif
    10741060
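
Note: several of the hunks above re-wrap the MPI_TYPE_VECTOR / MPI_TYPE_COMMIT calls that define the ghost-layer datatypes (type_x, type_xz, type_yz); the file itself notes that using such a derived datatype for the left/right exchange is about 10% faster than exchanging the same data without one. As a reference, a minimal self-contained sketch of defining and using a ghost-layer datatype of the type_x kind for a 2-D array follows; the array shape, the value of nbgp, and the neighbour ranks are assumptions, not PALM code.

PROGRAM ghost_exchange

   USE mpi

   IMPLICIT NONE

   INTEGER, PARAMETER ::  nx = 8, ny = 8, nbgp = 1

   REAL, DIMENSION(1-nbgp:ny+nbgp,1-nbgp:nx+nbgp) ::  a
   INTEGER ::  ierr, myid, north, numprocs, south, type_x
   INTEGER, DIMENSION(MPI_STATUS_SIZE) ::  status

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
!
!-- Datatype covering nbgp ghost rows of the 2-D array a(y,x): one block of
!-- nbgp contiguous y-elements per x-column, columns separated by the total
!-- y-extent of the array (stride ny+2*nbgp)
   CALL MPI_TYPE_VECTOR( nx+2*nbgp, nbgp, ny+2*nbgp, MPI_REAL, type_x, ierr )
   CALL MPI_TYPE_COMMIT( type_x, ierr )

   a     = REAL( myid )
   north = MODULO( myid + 1, numprocs )
   south = MODULO( myid - 1, numprocs )
!
!-- Send the nbgp northernmost inner rows to the northern neighbour and
!-- receive the corresponding rows from the southern neighbour into the
!-- southern ghost rows
   CALL MPI_SENDRECV( a(ny-nbgp+1,1-nbgp), 1, type_x, north, 0, &
                      a(1-nbgp,1-nbgp),    1, type_x, south, 0, &
                      MPI_COMM_WORLD, status, ierr )

   CALL MPI_TYPE_FREE( type_x, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM ghost_exchange

In a periodic 1-D decomposition along y this fills one side of the ghost layer; the opposite side would be filled by a second, mirrored MPI_SENDRECV.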