Changeset 3065 for palm/trunk/SOURCE/vertical_nesting_mod.f90
Timestamp: Jun 12, 2018 7:03:02 AM
File: palm/trunk/SOURCE/vertical_nesting_mod.f90 (1 edited)
palm/trunk/SOURCE/vertical_nesting_mod.f90
r3049 r3065 26 26 ! ----------------- 27 27 ! $Id$ 28 ! dz was replaced by dz(1), error messages related to vertical grid stretching 29 ! have been added 30 ! 31 ! 3049 2018-05-29 13:52:36Z Giersch 28 32 ! Error messages revised 29 33 ! … … 73 77 !> after spin-up of the CG 74 78 !> 79 !> @todo Replace dz(1) appropriatly to account for grid stretching 75 80 !> @todo Ensure that code can be compiled for serial and parallel mode. Please 76 81 !> check the placement of the directive "__parallel". … … 3639 3644 3640 3645 USE control_parameters, & 3641 ONLY: coupling_mode, coupling_mode_remote, coupling_topology, dz 3646 ONLY: coupling_mode, coupling_mode_remote, coupling_topology, dz, & 3647 dz_stretch_level_start, message_string 3642 3648 3643 3649 USE grid_variables, & … … 3678 3684 dxc = dx 3679 3685 dyc = dy 3680 dzc = dz 3686 dzc = dz(1) 3681 3687 cg_nprocs = numprocs 3682 3688 3683 3689 IF ( myid == 0 ) THEN 3684 3690 3685 CALL MPI_SEND( nxc, 1, MPI_INTEGER , numprocs, 1, comm_inter, 3691 CALL MPI_SEND( nxc, 1, MPI_INTEGER , numprocs, 1, comm_inter, & 3686 3692 ierr ) 3687 CALL MPI_SEND( nyc, 1, MPI_INTEGER , numprocs, 2, comm_inter, 3693 CALL MPI_SEND( nyc, 1, MPI_INTEGER , numprocs, 2, comm_inter, & 3688 3694 ierr ) 3689 CALL MPI_SEND( nzc, 1, MPI_INTEGER , numprocs, 3, comm_inter, 3695 CALL MPI_SEND( nzc, 1, MPI_INTEGER , numprocs, 3, comm_inter, & 3690 3696 ierr ) 3691 CALL MPI_SEND( dxc, 1, MPI_REAL , numprocs, 4, comm_inter, 3697 CALL MPI_SEND( dxc, 1, MPI_REAL , numprocs, 4, comm_inter, & 3692 3698 ierr ) 3693 CALL MPI_SEND( dyc, 1, MPI_REAL , numprocs, 5, comm_inter, 3699 CALL MPI_SEND( dyc, 1, MPI_REAL , numprocs, 5, comm_inter, & 3694 3700 ierr ) 3695 CALL MPI_SEND( dzc, 1, MPI_REAL , numprocs, 6, comm_inter, 3701 CALL MPI_SEND( dzc, 1, MPI_REAL , numprocs, 6, comm_inter, & 3696 3702 ierr ) 3697 CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 7, comm_inter, 3703 CALL MPI_SEND( pdims, 2, MPI_INTEGER, numprocs, 7, comm_inter, & 3698 3704 ierr ) 3699 3705 CALL MPI_SEND( cg_nprocs, 1, MPI_INTEGER, numprocs, 8, comm_inter, & 3700 3706 ierr ) 3701 CALL MPI_RECV( nxf, 1, MPI_INTEGER, numprocs, 21, comm_inter, 3707 CALL MPI_RECV( nxf, 1, MPI_INTEGER, numprocs, 21, comm_inter, & 3702 3708 status, ierr ) 3703 CALL MPI_RECV( nyf, 1, MPI_INTEGER, numprocs, 22, comm_inter, 3709 CALL MPI_RECV( nyf, 1, MPI_INTEGER, numprocs, 22, comm_inter, & 3704 3710 status, ierr ) 3705 CALL MPI_RECV( nzf, 1, MPI_INTEGER, numprocs, 23, comm_inter, 3711 CALL MPI_RECV( nzf, 1, MPI_INTEGER, numprocs, 23, comm_inter, & 3706 3712 status, ierr ) 3707 CALL MPI_RECV( dxf, 1, MPI_REAL, numprocs, 24, comm_inter, 3713 CALL MPI_RECV( dxf, 1, MPI_REAL, numprocs, 24, comm_inter, & 3708 3714 status, ierr ) 3709 CALL MPI_RECV( dyf, 1, MPI_REAL, numprocs, 25, comm_inter, 3715 CALL MPI_RECV( dyf, 1, MPI_REAL, numprocs, 25, comm_inter, & 3710 3716 status, ierr ) 3711 CALL MPI_RECV( dzf, 1, MPI_REAL, numprocs, 26, comm_inter, 3717 CALL MPI_RECV( dzf, 1, MPI_REAL, numprocs, 26, comm_inter, & 3712 3718 status, ierr ) 3713 CALL MPI_RECV( pdims_partner, 2, MPI_INTEGER, 3719 CALL MPI_RECV( pdims_partner, 2, MPI_INTEGER, & 3714 3720 numprocs, 27, comm_inter, status, ierr ) 3715 CALL MPI_RECV( fg_nprocs, 1, MPI_INTEGER, &3721 CALL MPI_RECV( fg_nprocs, 1, MPI_INTEGER, & 3716 3722 numprocs, 28, comm_inter, status, ierr ) 3717 3723 ENDIF … … 3725 3731 CALL MPI_BCAST( pdims_partner, 2, MPI_INTEGER, 0, comm2d, ierr ) 3726 3732 CALL MPI_BCAST( fg_nprocs, 1, MPI_INTEGER, 0, comm2d, ierr ) 3733 3734 ! 
3735 !-- Check if stretching is used within the nested domain. ABS(...) is 3736 !-- necessary because of the default value of -9999999.9_wp (negative) 3737 IF ( ABS( dz_stretch_level_start(1) ) <= (nzf+1)*dzf ) THEN 3738 message_string = 'Stretching in the parent domain is '// & 3739 'only allowed above the nested domain' 3740 CALL message( 'vertical_nesting_mod', 'PA0497', 1, 2, 0, 6, 0 ) 3741 ENDIF 3727 3742 3728 3743 ELSEIF ( coupling_mode == 'vnested_fine' ) THEN … … 3733 3748 dxf = dx 3734 3749 dyf = dy 3735 dzf = dz 3750 dzf = dz(1) 3736 3751 fg_nprocs = numprocs 3737 3752 … … 3774 3789 3775 3790 ENDIF 3776 3791 3777 3792 ngp_c = ( nxc+1 + 2 * nbgp ) * ( nyc+1 + 2 * nbgp ) 3778 3793 ngp_f = ( nxf+1 + 2 * nbgp ) * ( nyf+1 + 2 * nbgp ) … … 3910 3925 3911 3926 #if defined( __parallel ) 3912 USE arrays_3d, 3927 USE arrays_3d, & 3913 3928 ONLY: zu, zw 3914 3929 3915 USE control_parameters, 3916 ONLY: coupling_mode 3930 USE control_parameters, & 3931 ONLY: coupling_mode, message_string, number_stretch_level_start 3917 3932 3918 USE indices, 3933 USE indices, & 3919 3934 ONLY: nzt 3920 3935 … … 3925 3940 IMPLICIT NONE 3926 3941 3927 !-- Allocate and Exchange zuc and zuf, zwc and zwf 3942 ! 3943 !-- Allocate and Exchange zuc and zuf, zwc and zwf 3928 3944 IF ( coupling_mode(1:8) == 'vnested_' ) THEN 3929 3945 … … 3932 3948 3933 3949 IF ( coupling_mode == 'vnested_crse' ) THEN 3934 3935 zuc = zu 3936 zwc = zw 3950 3951 zuc = zu 3952 zwc = zw 3953 3937 3954 IF ( myid == 0 ) THEN 3938 3955 … … 3954 3971 ELSEIF ( coupling_mode == 'vnested_fine' ) THEN 3955 3972 3956 zuf = zu 3957 zwf = zw 3973 ! 3974 !-- Check if stretching is used within the nested domain 3975 IF ( number_stretch_level_start > 0 ) THEN 3976 message_string = 'Stretching in the nested domain is not '//& 3977 'allowed' 3978 CALL message( 'vertical_nesting_mod', 'PA0498', 1, 2, 0, 6, 0 ) 3979 ENDIF 3980 3981 zuf = zu 3982 zwf = zw 3983 3958 3984 IF ( myid == 0 ) THEN 3959 3985
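To illustrate the effect of the new PA0497 consistency check, below is a small standalone sketch. It is not part of the changeset; the values of nzf, dzf and dz_stretch_level_start are made up for illustration. It only reproduces the condition added above: stretching in the coarse (parent) run must not start below the top of the vertically nested fine domain.

! Standalone sketch (not part of r3065) of the PA0497 check logic.
! nzf, dzf and dz_stretch_level_start hold made-up example values.
PROGRAM check_vnest_stretching
   IMPLICIT NONE
   INTEGER, PARAMETER ::  wp = KIND( 1.0D0 )

   INTEGER  ::  nzf                        !< top grid index of the fine domain
   REAL(wp) ::  dzf                        !< vertical grid spacing of the fine domain
   REAL(wp) ::  dz_stretch_level_start(9)  !< heights where stretching starts in the parent run

   nzf = 64
   dzf = 10.0_wp
   dz_stretch_level_start    = -9999999.9_wp   ! default meaning "no stretching" (negative)
   dz_stretch_level_start(1) = 500.0_wp        ! first stretching level of the parent run

!
!--This is the same condition as added in vertical_nesting_mod in r3065;
!--ABS() is needed because the default value is a large negative number
   IF ( ABS( dz_stretch_level_start(1) ) <= ( nzf + 1 ) * dzf )  THEN
      PRINT *, 'PA0497: stretching in the parent domain is only allowed ', &
               'above the nested domain'
   ELSE
      PRINT *, 'stretching starts above the nested domain -> OK'
   ENDIF

END PROGRAM check_vnest_stretching

With these example values the fine domain extends to (nzf+1)*dzf = 650 m while the first stretching level of the parent lies at 500 m, so the condition is met and a PALM run would abort with PA0497; raising dz_stretch_level_start(1) above 650 m would pass the check.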