Changeset 667 for palm/trunk/SOURCE/init_pegrid.f90
- Timestamp: Dec 23, 2010 12:06:00 PM
- Location: palm/trunk/SOURCE
- Files: 2 edited
Legend:
- Unmodified lines are shown without a prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
- "[...]" marks unchanged code omitted from the listing
palm/trunk/SOURCE
- Property svn:mergeinfo set to:
  /palm/branches/suehring 423-666
  /palm/branches/letzel/masked_output/SOURCE 296-409
palm/trunk/SOURCE/init_pegrid.f90
r647 → r667

 ! Current revisions:
 ! -----------------
+!
+! Moved determination of target_id's from init_coupling
+!
+! Determination of parameters needed for coupling (coupling_topology, ngp_a, ngp_o)
+! with different grid/processor-topology in ocean and atmosphere
+!
+!
+! Adaption of ngp_xy, ngp_y to a dynamic number of ghost points.
+! The maximum_grid_level changed from 1 to 0. 0 is the normal grid, 1 to
+! maximum_grid_level the grids for multigrid, in which 0 and 1 are normal grids.
+! This distinction is due to reasons of data exchange and performance for the
+! normal grid and grids in poismg.
+! The definition of MPI-Vectors adapted to a dynamic numer of ghost points.
+! New MPI-Vectors for data exchange between left and right boundaries added.
+! This is due to reasons of performance (10% faster).
 !
 ! ATTENTION: nnz_x undefined problem still has to be solved!!!!!!!!

[...]

+
    IMPLICIT NONE

[...]

    INTEGER, DIMENSION(:), ALLOCATABLE ::  ind_all, nxlf, nxrf, nynf, nysf
+
+   INTEGER, DIMENSION(2) ::  pdims_remote

    LOGICAL ::  found

[...]

 #if defined( __parallel )
+
 !
 !-- Determine the processor topology or check it, if prescribed by the user

[...]

 #endif

+!
+!-- Determine the number of ghost points
+   IF (scalar_advec == 'ws-scheme' .OR. momentum_advec == 'ws-scheme') THEN
+      nbgp = 3
+   ELSE
+      nbgp = 1
+   END IF
+
 !
 !-- In case of coupled runs, create a new MPI derived datatype for the
 !-- exchange of surface (xy) data.
 !-- Gridpoint number for the exchange of ghost points (xy-plane)
-   ngp_xy = ( nxr - nxl + 3 ) * ( nyn - nys + 3 )
+
+   ngp_xy = ( nxr - nxl + 1 + 2 * nbgp ) * ( nyn - nys + 1 + 2 * nbgp )

 !

[...]

    CALL MPI_TYPE_VECTOR( ngp_xy, 1, nzt-nzb+2, MPI_REAL, type_xy, ierr )
    CALL MPI_TYPE_COMMIT( type_xy, ierr )
+
+
+   IF ( TRIM( coupling_mode ) .NE. 'uncoupled' ) THEN
+
+!
+!--    Pass the number of grid points of the atmosphere model to
+!--    the ocean model and vice versa
+      IF ( coupling_mode == 'atmosphere_to_ocean' ) THEN
+
+         nx_a = nx
+         ny_a = ny
+
+         IF ( myid == 0 ) THEN
+            CALL MPI_SEND( nx_a, 1, MPI_INTEGER, numprocs, 1, &
+                           comm_inter, ierr )
+            CALL MPI_SEND( ny_a, 1, MPI_INTEGER, numprocs, 2, &
+                           comm_inter, ierr )
+            CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 3, &
+                           comm_inter, ierr )
+            CALL MPI_RECV( nx_o, 1, MPI_INTEGER, numprocs, 4, &
+                           comm_inter, status, ierr )
+            CALL MPI_RECV( ny_o, 1, MPI_INTEGER, numprocs, 5, &
+                           comm_inter, status, ierr )
+            CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, numprocs, 6, &
+                           comm_inter, status, ierr )
+         ENDIF
+
+         CALL MPI_BCAST( nx_o, 1, MPI_INTEGER, 0, comm2d, ierr)
+         CALL MPI_BCAST( ny_o, 1, MPI_INTEGER, 0, comm2d, ierr)
+         CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr)
+
+      ELSEIF ( coupling_mode == 'ocean_to_atmosphere' ) THEN
+
+         nx_o = nx
+         ny_o = ny
+
+         IF ( myid == 0 ) THEN
+            CALL MPI_RECV( nx_a, 1, MPI_INTEGER, 0, 1, &
+                           comm_inter, status, ierr )
+            CALL MPI_RECV( ny_a, 1, MPI_INTEGER, 0, 2, &
+                           comm_inter, status, ierr )
+            CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, 0, 3, &
+                           comm_inter, status, ierr )
+            CALL MPI_SEND( nx_o, 1, MPI_INTEGER, 0, 4, &
+                           comm_inter, ierr )
+            CALL MPI_SEND( ny_o, 1, MPI_INTEGER, 0, 5, &
+                           comm_inter, ierr )
+            CALL MPI_SEND( pdims, 2, MPI_INTEGER, 0, 6, &
+                           comm_inter, ierr )
+         ENDIF
+
+         CALL MPI_BCAST( nx_a, 1, MPI_INTEGER, 0, comm2d, ierr)
+         CALL MPI_BCAST( ny_a, 1, MPI_INTEGER, 0, comm2d, ierr)
+         CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr)
+
+      ENDIF
+
+      ngp_a = (nx_a+1+2*nbgp)*(ny_a+1+2*nbgp)
+      ngp_o = (nx_o+1+2*nbgp)*(ny_o+1+2*nbgp)
+
+!
+!--   determine if the horizontal grid and the number of PEs
+!--   in ocean and atmosphere is same or not
+!--   (different number of PEs still not implemented)
+      IF ( nx_o == nx_a .AND. ny_o == ny_a .AND. &
+           pdims(1) == pdims_remote(1) .AND. pdims(2) == pdims_remote(2) ) &
+      THEN
+         coupling_topology = 0
+      ELSE
+         coupling_topology = 1
+      ENDIF
+
+!
+!--   Determine the target PEs for the exchange between ocean and
+!--   atmosphere (comm2d)
+      IF ( coupling_topology == 0) THEN
+         IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
+            target_id = myid + numprocs
+         ELSE
+            target_id = myid
+         ENDIF
+
+      ELSE
+
+!
+!--      In case of nonequivalent topology in ocean and atmosphere only for
+!--      PE0 in ocean and PE0 in atmosphere a target_id is needed, since
+!--      data echxchange between ocean and atmosphere will be done only by
+!--      those PEs.
+         IF ( myid == 0 ) THEN
+            IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
+               target_id = numprocs
+            ELSE
+               target_id = 0
+            ENDIF
+            print*, coupling_mode, myid, " -> ", target_id, "numprocs: ", numprocs
+         ENDIF
+      ENDIF
+
+   ENDIF
+
+
 #endif

[...]

    ELSE

-      maximum_grid_level = 1
+      maximum_grid_level = 0

    ENDIF

[...]

 !
 !-- Gridpoint number for the exchange of ghost points (y-line for 2D-arrays)
-   ngp_y = nyn - nys + 1
+   ngp_y = nyn - nys + 1 + 2 * nbgp

 !
 !-- Define a new MPI derived datatype for the exchange of ghost points in
 !-- y-direction for 2D-arrays (line)
-   CALL MPI_TYPE_VECTOR( nxr-nxl+3, 1, ngp_y+2, MPI_REAL, type_x, ierr )
+   CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_REAL, type_x, ierr )
    CALL MPI_TYPE_COMMIT( type_x, ierr )
-   CALL MPI_TYPE_VECTOR( nxr-nxl+3, 1, ngp_y+2, MPI_INTEGER, type_x_int, ierr )
+   CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_INTEGER, type_x_int, ierr )
    CALL MPI_TYPE_COMMIT( type_x_int, ierr )
+
+   CALL MPI_TYPE_VECTOR( nbgp, ngp_y, ngp_y, MPI_REAL, type_y, ierr )
+   CALL MPI_TYPE_COMMIT( type_y, ierr )
+   CALL MPI_TYPE_VECTOR( nbgp, ngp_y, ngp_y, MPI_INTEGER, type_y_int, ierr )
+   CALL MPI_TYPE_COMMIT( type_y_int, ierr )
+

 !

[...]

 !-- Do these calculations for the model grid and (if necessary) also
 !-- for the coarser grid levels used in the multigrid method
-   ALLOCATE ( ngp_yz(maximum_grid_level), type_xz(maximum_grid_level) )
+   ALLOCATE ( ngp_yz(0:maximum_grid_level), type_xz(0:maximum_grid_level),&
+              type_yz(0:maximum_grid_level) )

    nxl_l = nxl; nxr_l = nxr; nys_l = nys; nyn_l = nyn; nzb_l = nzb; nzt_l = nzt
-
-   DO i = maximum_grid_level, 1 , -1
-      ngp_yz(i) = (nzt_l - nzb_l + 2) * (nyn_l - nys_l + 3)
-
-      CALL MPI_TYPE_VECTOR( nxr_l-nxl_l+3, nzt_l-nzb_l+2, ngp_yz(i), &
+!
+!-- Discern between the model grid, which needs nbgp ghost points and
+!-- grid levels for the multigrid scheme. In the latter case only one
+!-- ghost point is necessary.
+!-- First definition of mpi-vectors for exchange of ghost layers on normal
+!-- grid. The following loop is needed for data exchange in poismg.f90.
+!
+!-- Determine number of grid points of yz-layer for exchange
+   ngp_yz(0) = (nzt - nzb + 2) * (nyn - nys + 1 + 2 * nbgp)
+!
+!-- Define a new mpi datatype for the exchange of left - right boundaries.
+!-- Indeed the data are connected in the physical memory and no mpi-vector
+!-- is necessary, but the data exchange between left and right PE's using
+!-- mpi-vectors is 10% faster than without.
+   CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp*(nzt-nzb+2), ngp_yz(0), &
+                         MPI_REAL, type_xz(0), ierr )
+   CALL MPI_TYPE_COMMIT( type_xz(0), ierr )
+
+   CALL MPI_TYPE_VECTOR( nbgp, ngp_yz(0), ngp_yz(0), MPI_REAL, type_yz(0), ierr)
+   CALL MPI_TYPE_COMMIT( type_yz(0), ierr )
+!
+!-- Definition of mpi-vectors for multigrid
+   IF ( psolver == 'multigrid' ) THEN
+!
+!--   The definition of mpi-vectors as aforementioned, but only 1 ghost point is used.
+      DO i = maximum_grid_level, 1 , -1
+         ngp_yz(i) = (nzt_l - nzb_l + 2) * (nyn_l - nys_l + 3)
+
+         CALL MPI_TYPE_VECTOR( nxr_l-nxl_l+3, nzt_l-nzb_l+2, ngp_yz(i), &
                          MPI_REAL, type_xz(i), ierr )
-      CALL MPI_TYPE_COMMIT( type_xz(i), ierr )
-
-      nxl_l = nxl_l / 2
-      nxr_l = nxr_l / 2
-      nys_l = nys_l / 2
-      nyn_l = nyn_l / 2
-      nzt_l = nzt_l / 2
-   ENDDO
+         CALL MPI_TYPE_COMMIT( type_xz(i), ierr )
+
+         CALL MPI_TYPE_VECTOR( 1, ngp_yz(i), ngp_yz(i), MPI_REAL, type_yz(i), ierr)
+         CALL MPI_TYPE_COMMIT( type_yz(i), ierr )
+
+         nxl_l = nxl_l / 2
+         nxr_l = nxr_l / 2
+         nys_l = nys_l / 2
+         nyn_l = nyn_l / 2
+         nzt_l = nzt_l / 2
+      ENDDO
+   END IF
 #endif
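For orientation, the arithmetic behind the new ghost-point expressions: nbgp is 3 when the Wicker-Skamarock advection scheme ('ws-scheme') is selected and 1 otherwise, and the old hard-coded "+ 3" terms correspond to exactly one ghost layer on each side. For an assumed subdomain of 32 x 32 interior grid points per PE (an illustrative size, not taken from the changeset):

    old form:            ngp_xy = ( nxr - nxl + 3 ) * ( nyn - nys + 3 )  = 34 * 34 = 1156
    new form, nbgp = 1:  ngp_xy = ( 32 + 2*1 ) * ( 32 + 2*1 )            = 34 * 34 = 1156
    new form, nbgp = 3:  ngp_xy = ( 32 + 2*3 ) * ( 32 + 2*3 )            = 38 * 38 = 1444

For nbgp = 1 the new expression therefore reduces to the old one, while the 'ws-scheme' halo enlarges the exchanged xy plane. The same pattern applies to ngp_y, which now contains the ghost rows itself; the old code added them separately through the stride ngp_y+2 in the type_x definition.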
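The changeset only defines the derived datatypes; the routines that actually exchange ghost layers with them are not part of this file. The following standalone program is a minimal sketch of how an nbgp-wide block of yz layers could be swapped between left and right neighbours with a type_yz-style vector. All names and sizes in it (ar, pleft, pright, the array bounds, the cyclic 1-D decomposition) are assumptions made for this illustration and are not code from the changeset.

PROGRAM ghost_exchange_sketch

!
!-- Illustrative sketch only (not part of changeset 667): exchange of an
!-- nbgp-wide yz ghost slab between left and right neighbours, using a
!-- derived datatype built like type_yz(0) above.
   USE MPI

   IMPLICIT NONE

   INTEGER, PARAMETER ::  nbgp = 3                        !  ghost layers ('ws-scheme')
   INTEGER, PARAMETER ::  nzb = 0, nzt = 16               !  assumed vertical index range
   INTEGER, PARAMETER ::  nys = 0, nyn = 15, nxl = 0, nxr = 15   !  assumed subdomain

   INTEGER ::  comm2d, ierr, myid, ngp_yz, npes, pleft, pright, type_yz
   INTEGER, DIMENSION(MPI_STATUS_SIZE) ::  status

   REAL, DIMENSION(nzb:nzt+1,nys-nbgp:nyn+nbgp,nxl-nbgp:nxr+nbgp) ::  ar

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, npes, ierr )

!
!-- Simple cyclic 1-D decomposition along x (stand-in for the 2-D virtual
!-- topology that init_pegrid creates with MPI_CART_CREATE)
   comm2d = MPI_COMM_WORLD
   pleft  = MODULO( myid - 1, npes )
   pright = MODULO( myid + 1, npes )

!
!-- Number of grid points of one yz layer, including the y ghost rows,
!-- as in ngp_yz(0) of the changeset
   ngp_yz = ( nzt - nzb + 2 ) * ( nyn - nys + 1 + 2 * nbgp )

!
!-- nbgp complete yz layers are contiguous in memory because x is the
!-- slowest varying index, hence block length and stride are identical
   CALL MPI_TYPE_VECTOR( nbgp, ngp_yz, ngp_yz, MPI_REAL, type_yz, ierr )
   CALL MPI_TYPE_COMMIT( type_yz, ierr )

   ar = REAL( myid )

!
!-- Send the leftmost interior layers to the left neighbour, receive the
!-- right ghost layers from the right neighbour
   CALL MPI_SENDRECV( ar(nzb,nys-nbgp,nxl),   1, type_yz, pleft,  0,           &
                      ar(nzb,nys-nbgp,nxr+1), 1, type_yz, pright, 0,           &
                      comm2d, status, ierr )
!
!-- Send the rightmost interior layers to the right neighbour, receive the
!-- left ghost layers from the left neighbour
   CALL MPI_SENDRECV( ar(nzb,nys-nbgp,nxr-nbgp+1), 1, type_yz, pright, 1,      &
                      ar(nzb,nys-nbgp,nxl-nbgp),   1, type_yz, pleft,  1,      &
                      comm2d, status, ierr )

   CALL MPI_TYPE_FREE( type_yz, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM ghost_exchange_sketch

Because z and y are the innermost array dimensions, the nbgp complete yz layers form one contiguous block of memory, so a vector of nbgp equal blocks with block length equal to the stride describes them exactly; the changeset's comment notes that routing the left/right exchange through such MPI vectors was nevertheless measured to be about 10% faster than exchanging the same data without them.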