Changeset 709 for palm/trunk/SOURCE/init_pegrid.f90
Timestamp: Mar 30, 2011 9:31:40 AM (13 years ago)
File: 1 edited
Legend:
  (unmarked)  unmodified
  -           removed in r709
  +           added in r709
  …           unchanged lines omitted
palm/trunk/SOURCE/init_pegrid.f90
--- r708
+++ r709

! Current revisions:
! -----------------
- !
+ ! formatting adjustments
!
! ATTENTION: nnz_x undefined problem still has to be solved!!!!!!!!
…

CALL MPI_OPEN_PORT( MPI_INFO_NULL, port_name, ierr )
- !
- !-- TEST OUTPUT (TO BE REMOVED)
- WRITE(9,*) TRIM( coupling_mode ), &
- ', ierr after MPI_OPEN_PORT: ', ierr
- CALL LOCAL_FLUSH( 9 )

CALL MPI_PUBLISH_NAME( 'palm_coupler', MPI_INFO_NULL, port_name, &
ierr )
- !
- !-- TEST OUTPUT (TO BE REMOVED)
- WRITE(9,*) TRIM( coupling_mode ), &
- ', ierr after MPI_PUBLISH_NAME: ', ierr
- CALL LOCAL_FLUSH( 9 )

!
…

CALL MPI_LOOKUP_NAME( 'palm_coupler', MPI_INFO_NULL, port_name, ierr )
- !
- !-- TEST OUTPUT (TO BE REMOVED)
- WRITE(9,*) TRIM( coupling_mode ), &
- ', ierr after MPI_LOOKUP_NAME: ', ierr
- CALL LOCAL_FLUSH( 9 )
-

ENDIF
…
IF ( coupling_mode == 'atmosphere_to_ocean' ) THEN

- PRINT*, '... before COMM_ACCEPT'
CALL MPI_COMM_ACCEPT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
comm_inter, ierr )
- PRINT*, '--- ierr = ', ierr
- PRINT*, '--- comm_inter atmosphere = ', comm_inter
-
coupling_mode_remote = 'ocean_to_atmosphere'

ELSEIF ( coupling_mode == 'ocean_to_atmosphere' ) THEN

- IF ( myid == 0 ) PRINT*, '*** read: ', port_name, ' ierr = ', ierr
- PRINT*, '... before COMM_CONNECT'
CALL MPI_COMM_CONNECT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD, &
comm_inter, ierr )
- PRINT*, '--- ierr = ', ierr
- PRINT*, '--- comm_inter ocean = ', comm_inter
-
coupling_mode_remote = 'atmosphere_to_ocean'

…

!
- !-- Determine the number of ghost point s
- IF ( scalar_advec == 'ws-scheme' .OR. momentum_advec == 'ws-scheme')THEN
+ !-- Determine the number of ghost point layers
+ IF ( scalar_advec == 'ws-scheme' .OR. momentum_advec == 'ws-scheme' ) THEN
nbgp = 3
ELSE
nbgp = 1
- END IF
-
- !
- !-- In case of coupled runs, create a new MPI derived datatype for the
- !-- exchange of surface (xy) data .
- !-- Gridpoint number for the exchange of ghost points (xy-plane)
-
+ ENDIF
+
+ !
+ !-- Create a new MPI derived datatype for the exchange of surface (xy) data,
+ !-- which is needed for coupled atmosphere-ocean runs.
+ !-- First, calculate number of grid points of an xy-plane.
ngp_xy = ( nxr - nxl + 1 + 2 * nbgp ) * ( nyn - nys + 1 + 2 * nbgp )
-
- !
- !-- Define a new MPI derived datatype for the exchange of ghost points in
- !-- y-direction for 2D-arrays (line)
CALL MPI_TYPE_VECTOR( ngp_xy, 1, nzt-nzb+2, MPI_REAL, type_xy, ierr )
CALL MPI_TYPE_COMMIT( type_xy, ierr )

-
- IF ( TRIM( coupling_mode ) .NE. 'uncoupled' ) THEN
+ IF ( TRIM( coupling_mode ) /= 'uncoupled' ) THEN

!
…
ny_a = ny

- IF ( myid == 0 ) THEN
- CALL MPI_SEND( nx_a, 1, MPI_INTEGER, numprocs, 1, &
- comm_inter, ierr )
- CALL MPI_SEND( ny_a, 1, MPI_INTEGER, numprocs, 2, &
- comm_inter, ierr )
- CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 3, &
- comm_inter, ierr )
- CALL MPI_RECV( nx_o, 1, MPI_INTEGER, numprocs, 4, &
- comm_inter, status, ierr )
- CALL MPI_RECV( ny_o, 1, MPI_INTEGER, numprocs, 5, &
- comm_inter, status, ierr )
- CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, numprocs, 6, &
+ IF ( myid == 0 ) THEN
+
+ CALL MPI_SEND( nx_a, 1, MPI_INTEGER, numprocs, 1, comm_inter, &
+ ierr )
+ CALL MPI_SEND( ny_a, 1, MPI_INTEGER, numprocs, 2, comm_inter, &
+ ierr )
+ CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 3, comm_inter, &
+ ierr )
+ CALL MPI_RECV( nx_o, 1, MPI_INTEGER, numprocs, 4, comm_inter, &
+ status, ierr )
+ CALL MPI_RECV( ny_o, 1, MPI_INTEGER, numprocs, 5, comm_inter, &
+ status, ierr )
+ CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, numprocs, 6, &
comm_inter, status, ierr )
ENDIF

- CALL MPI_BCAST( nx_o, 1, MPI_INTEGER, 0, comm2d, ierr )
- CALL MPI_BCAST( ny_o, 1, MPI_INTEGER, 0, comm2d, ierr )
- CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr )
+ CALL MPI_BCAST( nx_o, 1, MPI_INTEGER, 0, comm2d, ierr )
+ CALL MPI_BCAST( ny_o, 1, MPI_INTEGER, 0, comm2d, ierr )
+ CALL MPI_BCAST( pdims_remote, 2, MPI_INTEGER, 0, comm2d, ierr )

ELSEIF ( coupling_mode == 'ocean_to_atmosphere' ) THEN
…

IF ( myid == 0 ) THEN
- CALL MPI_RECV( nx_a, 1, MPI_INTEGER, 0, 1, &
- comm_inter, status, ierr )
- CALL MPI_RECV( ny_a, 1, MPI_INTEGER, 0, 2, &
- comm_inter, status, ierr )
- CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, 0, 3, &
- comm_inter, status, ierr )
- CALL MPI_SEND( nx_o, 1, MPI_INTEGER, 0, 4, &
- comm_inter, ierr )
- CALL MPI_SEND( ny_o, 1, MPI_INTEGER, 0, 5, &
- comm_inter, ierr )
- CALL MPI_SEND( pdims, 2, MPI_INTEGER, 0, 6, &
- comm_inter, ierr )
+
+ CALL MPI_RECV( nx_a, 1, MPI_INTEGER, 0, 1, comm_inter, status, &
+ ierr )
+ CALL MPI_RECV( ny_a, 1, MPI_INTEGER, 0, 2, comm_inter, status, &
+ ierr )
+ CALL MPI_RECV( pdims_remote, 2, MPI_INTEGER, 0, 3, comm_inter, &
+ status, ierr )
+ CALL MPI_SEND( nx_o, 1, MPI_INTEGER, 0, 4, comm_inter, ierr )
+ CALL MPI_SEND( ny_o, 1, MPI_INTEGER, 0, 5, comm_inter, ierr )
+ CALL MPI_SEND( pdims, 2, MPI_INTEGER, 0, 6, comm_inter, ierr )
ENDIF

…
ENDIF

- ngp_a = (nx_a+1+2*nbgp)*(ny_a+1+2*nbgp)
- ngp_o = (nx_o+1+2*nbgp)*(ny_o+1+2*nbgp)
-
- !
- !-- determine if the horizontal grid and the number of PEs
- !-- in ocean and atmosphere is same or not
- !-- (different number of PEs still not implemented)
- IF ( nx_o == nx_a .AND. ny_o == ny_a .AND. &
+ ngp_a = ( nx_a+1 + 2 * nbgp ) * ( ny_a+1 + 2 * nbgp )
+ ngp_o = ( nx_o+1 + 2 * nbgp ) * ( ny_o+1 + 2 * nbgp )
+
+ !
+ !-- Determine if the horizontal grid and the number of PEs in ocean and
+ !-- atmosphere is same or not
+ IF ( nx_o == nx_a .AND. ny_o == ny_a .AND. &
pdims(1) == pdims_remote(1) .AND. pdims(2) == pdims_remote(2) ) &
THEN
…
!-- Determine the target PEs for the exchange between ocean and
!-- atmosphere (comm2d)
- IF ( coupling_topology == 0) THEN
- IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
+ IF ( coupling_topology == 0 ) THEN
+ !
+ !-- In case of identical topologies, every atmosphere PE has exactly one
+ !-- ocean PE counterpart and vice versa
+ IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' ) THEN
target_id = myid + numprocs
ELSE
…

ELSE
-
!
!-- In case of nonequivalent topology in ocean and atmosphere only for
!-- PE0 in ocean and PE0 in atmosphere a target_id is needed, since
- !-- data echxchange between ocean and atmosphere will be done only by
- !-- those PEs.
- IF ( myid == 0 ) THEN
- IF ( TRIM( coupling_mode ) .EQ. 'atmosphere_to_ocean' ) THEN
+ !-- data echxchange between ocean and atmosphere will be done only
+ !-- between these PEs.
+ IF ( myid == 0 ) THEN
+
+ IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' ) THEN
target_id = numprocs
ELSE
target_id = 0
ENDIF
- print*, coupling_mode, myid, " -> ", target_id, "numprocs: ", numprocs
+
ENDIF
+
ENDIF

…
!
!-- Find out, if the total domain allows more levels. These additional
- !-- levels are processed on PE0 only.
+ !-- levels are identically processed on all PEs.
IF ( numprocs > 1 .AND. mg_switch_to_pe0_level /= -1 ) THEN
+
IF ( mg_levels_z > MIN( mg_levels_x, mg_levels_y ) ) THEN
+
mg_switch_to_pe0_level_l = maximum_grid_level

…
mg_switch_to_pe0_level_l = 0
ENDIF
+
ELSE
+
mg_switch_to_pe0_level_l = 0
maximum_grid_level_l = maximum_grid_level
+
ENDIF

…

ENDIF
+
ENDIF

…
!-- Save the grid size of the subdomain at the switch level, because
!-- it is needed in poismg.
- !-- Array bounds of the local subdomain grids are gathered on PE0
ind(1) = nxl_l; ind(2) = nxr_l
ind(3) = nys_l; ind(4) = nyn_l
…
DEALLOCATE( ind_all )
!
- !-- Calculate the grid size of the total domain gathered on PE0
+ !-- Calculate the grid size of the total domain
nxr_l = ( nxr_l-nxl_l+1 ) * pdims(1) - 1
nxl_l = 0
…

!
- !-- Define a new MPI derived datatype for the exchange of ghost points in
- !-- y-direction for 2D-arrays (line)
- CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_REAL, type_x, ierr )
+ !-- Define new MPI derived datatypes for the exchange of ghost points in
+ !-- x- and y-direction for 2D-arrays (line)
+ CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_REAL, type_x, &
+ ierr )
CALL MPI_TYPE_COMMIT( type_x, ierr )
- CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_INTEGER, type_x_int, ierr )
+ CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp, ngp_y, MPI_INTEGER, &
+ type_x_int, ierr )
CALL MPI_TYPE_COMMIT( type_x_int, ierr )

…

nxl_l = nxl; nxr_l = nxr; nys_l = nys; nyn_l = nyn; nzb_l = nzb; nzt_l = nzt
+
!
!-- Discern between the model grid, which needs nbgp ghost points and
!-- grid levels for the multigrid scheme. In the latter case only one
!-- ghost point is necessary.
- !-- First definition of mpi-vectors for exchange of ghost layers on normal
+ !-- First definition of MPI-datatypes for exchange of ghost layers on normal
!-- grid. The following loop is needed for data exchange in poismg.f90.
!
!-- Determine number of grid points of yz-layer for exchange
ngp_yz(0) = (nzt - nzb + 2) * (nyn - nys + 1 + 2 * nbgp)
- !
- !-- Define a new mpi datatype for the exchange of left - right boundaries.
- !-- Indeed the data are connected in the physical memory and no mpi-vector
- !-- is necessary, but the data exchange between left and right PE's using
- !-- mpi-vectors is 10% faster than without.
+
+ !
+ !-- Define an MPI-datatype for the exchange of left/right boundaries.
+ !-- Although data are contiguous in physical memory (which does not
+ !-- necessarily require an MPI-derived datatype), the data exchange between
+ !-- left and right PE's using the MPI-derived type is 10% faster than without.
CALL MPI_TYPE_VECTOR( nxr-nxl+1+2*nbgp, nbgp*(nzt-nzb+2), ngp_yz(0), &
MPI_REAL, type_xz(0), ierr )
CALL MPI_TYPE_COMMIT( type_xz(0), ierr )

- CALL MPI_TYPE_VECTOR( nbgp, ngp_yz(0), ngp_yz(0), MPI_REAL, type_yz(0), ierr)
+ CALL MPI_TYPE_VECTOR( nbgp, ngp_yz(0), ngp_yz(0), MPI_REAL, type_yz(0), &
+ ierr )
CALL MPI_TYPE_COMMIT( type_yz(0), ierr )
- !
- !-- Definition of mpi-vectors for multigrid
+
+ !
+ !-- Definition of MPI-datatypes for multigrid method (coarser level grids)
IF ( psolver == 'multigrid' ) THEN
!
- !-- The definition of mpi-vectors as aforementioned, but only 1 ghost point is used.
- DO i = maximum_grid_level, 1 , -1
+ !-- Definition of MPI-datatyoe as above, but only 1 ghost level is used
+ DO i = maximum_grid_level, 1 , -1
+
+
ngp_yz(i) = (nzt_l - nzb_l + 2) * (nyn_l - nys_l + 3)

CALL MPI_TYPE_VECTOR( nxr_l-nxl_l+3, nzt_l-nzb_l+2, ngp_yz(i), &
- MPI_REAL, type_xz(i), ierr )
+ MPI_REAL, type_xz(i), ierr )
CALL MPI_TYPE_COMMIT( type_xz(i), ierr )

- CALL MPI_TYPE_VECTOR( 1, ngp_yz(i), ngp_yz(i), MPI_REAL, type_yz(i), ierr)
+ CALL MPI_TYPE_VECTOR( 1, ngp_yz(i), ngp_yz(i), MPI_REAL, type_yz(i), &
+ ierr )
CALL MPI_TYPE_COMMIT( type_yz(i), ierr )

…
nyn_l = nyn_l / 2
nzt_l = nzt_l / 2
+
ENDDO
- END IF
+
+ ENDIF
#endif
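Illustration (not part of the changeset): the TEST OUTPUT removed above wrapped the MPI name-publishing handshake that connects the separately started atmosphere and ocean executables. The following standalone sketch shows how such a handshake is commonly structured with MPI_OPEN_PORT / MPI_PUBLISH_NAME / MPI_COMM_ACCEPT on one side and MPI_LOOKUP_NAME / MPI_COMM_CONNECT on the other. The program frame, the command-line handling of coupling_mode, and the clean-up calls are assumptions made for this example only; they are not taken from init_pegrid.f90.

PROGRAM coupler_handshake_demo

!-- Sketch of a dynamic-process coupling handshake between two executables.
!-- coupling_mode is read from the first command-line argument here, which
!-- is an assumption made only for this example.

   USE MPI

   IMPLICIT NONE

   CHARACTER(LEN=32)                ::  coupling_mode
   CHARACTER(LEN=MPI_MAX_PORT_NAME) ::  port_name
   INTEGER                          ::  comm_inter, ierr, myid

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL GET_COMMAND_ARGUMENT( 1, coupling_mode )

   IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' )  THEN
!
!--   "Server" side: open a port, make it findable under a service name,
!--   then wait for the other executable to connect. MPI_COMM_ACCEPT is
!--   collective; port_name is significant on the root PE only.
      IF ( myid == 0 )  THEN
         CALL MPI_OPEN_PORT( MPI_INFO_NULL, port_name, ierr )
         CALL MPI_PUBLISH_NAME( 'palm_coupler', MPI_INFO_NULL, port_name,   &
                                ierr )
      ENDIF
      CALL MPI_COMM_ACCEPT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD,    &
                            comm_inter, ierr )
      IF ( myid == 0 )  THEN
         CALL MPI_UNPUBLISH_NAME( 'palm_coupler', MPI_INFO_NULL, port_name, &
                                  ierr )
         CALL MPI_CLOSE_PORT( port_name, ierr )
      ENDIF
   ELSE
!
!--   "Client" side: resolve the service name to a port and connect to it
      IF ( myid == 0 )  THEN
         CALL MPI_LOOKUP_NAME( 'palm_coupler', MPI_INFO_NULL, port_name,    &
                               ierr )
      ENDIF
      CALL MPI_COMM_CONNECT( port_name, MPI_INFO_NULL, 0, MPI_COMM_WORLD,   &
                             comm_inter, ierr )
   ENDIF

!
!-- comm_inter is now an intercommunicator between the two executables and
!-- can be used for MPI_SEND / MPI_RECV exchanges as shown in the diff
   CALL MPI_COMM_DISCONNECT( comm_inter, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM coupler_handshake_demo

MPI_PUBLISH_NAME and MPI_LOOKUP_NAME can only see each other when both executables run under the same MPI runtime; depending on the implementation a separate name server (for example, ompi-server with Open MPI) may be required.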
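Illustration (not part of the changeset): the type_xz / type_yz definitions above pack the nbgp ghost layers of a 3D subdomain array into a single MPI derived datatype, so that a complete boundary slab can be exchanged with one MPI call. The sketch below builds the analogue of type_yz(0) for a simple 1-d domain decomposition along x and uses it in an MPI_SENDRECV ghost-layer exchange; the toy array bounds and the program frame are assumptions for the example, not PALM settings.

PROGRAM ghost_vector_demo

!-- Sketch: exchange the left/right ghost layers of a 3D array between
!-- neighbouring PEs using a derived datatype analogous to type_yz(0).
!-- The grid bounds below are made-up toy values.

   USE MPI

   IMPLICIT NONE

   INTEGER, PARAMETER ::  nbgp = 1, nzb = 0, nzt = 8, nys = 0, nyn = 7,  &
                          nxl = 0, nxr = 7
   INTEGER ::  ierr, left, myid, ngp_yz, numprocs, right, type_yz
   INTEGER ::  status(MPI_STATUS_SIZE)
   REAL, DIMENSION(nzb:nzt+1,nys-nbgp:nyn+nbgp,nxl-nbgp:nxr+nbgp) ::  a

   CALL MPI_INIT( ierr )
   CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )

!
!-- Number of grid points of one yz-layer (including ghost points in y),
!-- as in the ngp_yz(0) calculation of the changeset
   ngp_yz = ( nzt - nzb + 2 ) * ( nyn - nys + 1 + 2 * nbgp )

!
!-- nbgp blocks of ngp_yz REALs each, stride ngp_yz: the nbgp ghost layers
   CALL MPI_TYPE_VECTOR( nbgp, ngp_yz, ngp_yz, MPI_REAL, type_yz, ierr )
   CALL MPI_TYPE_COMMIT( type_yz, ierr )

!
!-- Neighbours in a non-periodic 1-d decomposition along x
   left  = myid - 1
   right = myid + 1
   IF ( myid == 0 )           left  = MPI_PROC_NULL
   IF ( myid == numprocs-1 )  right = MPI_PROC_NULL

   a = REAL( myid )

!
!-- Send the rightmost interior yz-layers to the right neighbour and
!-- receive the left ghost layers from the left neighbour, then vice versa
   CALL MPI_SENDRECV( a(nzb,nys-nbgp,nxr-nbgp+1), 1, type_yz, right, 0,  &
                      a(nzb,nys-nbgp,nxl-nbgp),   1, type_yz, left,  0,  &
                      MPI_COMM_WORLD, status, ierr )
   CALL MPI_SENDRECV( a(nzb,nys-nbgp,nxl),        1, type_yz, left,  1,  &
                      a(nzb,nys-nbgp,nxr+1),      1, type_yz, right, 1,  &
                      MPI_COMM_WORLD, status, ierr )

   CALL MPI_TYPE_FREE( type_yz, ierr )
   CALL MPI_FINALIZE( ierr )

END PROGRAM ghost_vector_demo

Since the z- and y-dimensions (including the ghost points in y) are the fastest-varying dimensions of a, one yz-layer occupies exactly ngp_yz consecutive REALs, so a blocklength and stride of ngp_yz describe the nbgp ghost layers correctly. That contiguity is also why the changeset comment notes that a derived datatype is not strictly necessary for the left/right exchange, merely faster in practice.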