Changeset 4010


Timestamp: May 31, 2019 1:25:08 PM
Author: hellstea
Message: Nest mass conservation correction included

Location: palm/trunk/SOURCE
Files: 2 edited

  • palm/trunk/SOURCE/pmc_interface_mod.f90

    r3987 r4010  
    2525! -----------------
    2626! $Id$
     27! Mass (volume) flux correction included to ensure global mass conservation for child domains.
     28!
     29! 3987 2019-05-22 09:52:13Z kanani
    2730! Introduce alternative switch for debug output during timestepping
    2831!
     
    648651    INTEGER(iwp) :: workarr_t_exchange_type_y
    649652 
    650     INTEGER(iwp), DIMENSION(3)          ::  parent_grid_info_int    !<
    651 
    652     REAL(wp), DIMENSION(7)              ::  parent_grid_info_real   !<
     653    INTEGER(iwp), DIMENSION(3)          ::  parent_grid_info_int    !< Array for communicating the parent-grid dimensions
     654                                                                    !< to its children.
     655
     656    REAL(wp), DIMENSION(6)              ::  face_area               !< Surface area of each boundary face
     657    REAL(wp), DIMENSION(7)              ::  parent_grid_info_real   !< Array for communicating the real-type parent-grid
     658                                                                    !< parameters to its children.
    653659
    654660    TYPE parentgrid_def
     
    714720    END INTERFACE pmci_datatrans
    715721
     722    INTERFACE pmci_ensure_nest_mass_conservation
     723       MODULE PROCEDURE pmci_ensure_nest_mass_conservation
     724    END INTERFACE pmci_ensure_nest_mass_conservation
     725
     726    INTERFACE pmci_ensure_nest_mass_conservation_vertical
     727       MODULE PROCEDURE pmci_ensure_nest_mass_conservation_vertical
     728    END INTERFACE pmci_ensure_nest_mass_conservation_vertical
     729
    716730    INTERFACE pmci_init
    717731       MODULE PROCEDURE pmci_init
     
    758772    PUBLIC pmci_set_swaplevel
    759773    PUBLIC get_number_of_children, get_childid, get_child_edges, get_child_gridspacing
    760 
    761 
     774    PUBLIC pmci_ensure_nest_mass_conservation
     775    PUBLIC pmci_ensure_nest_mass_conservation_vertical
     776   
    762777 CONTAINS
    763778
     
    15491564!
    15501565!--    Check that the child and parent grid lines do match
    1551        CALL pmci_check_grid_matching
    1552 
     1566       CALL pmci_check_grid_matching
     1567!       
     1568!--    Compute surface areas of the nest-boundary faces
     1569       CALL pmci_compute_face_areas
     1570       
    15531571    ENDIF
    15541572
     
    23302348       
    23312349    END SUBROUTINE pmci_check_grid_matching
    2332      
    2333 #endif 
     2350
     2351
     2352
     2353    SUBROUTINE pmci_compute_face_areas
     2354
     2355       IMPLICIT NONE
      2356       REAL(wp)  :: face_area_local   !< Local (on the current PE) integral area of the boundary face being processed
     2357       REAL(wp)  :: sub_sum           !< Intermediate sum in order to improve the accuracy of the summation
     2358
     2359       INTEGER(iwp) :: i              !< Running index in the x-direction
     2360       INTEGER(iwp) :: j              !< Running index in the y-direction
     2361       INTEGER(iwp) :: k              !< Running index in the z-direction
     2362       INTEGER(iwp) :: k_wall         !< Local topography top k-index
     2363       INTEGER(iwp) :: n              !< Running index over boundary faces
     2364
     2365       
     2366!
      2367!--    Sum up the area of the left boundary face
     2368       face_area(1) = 0.0_wp
     2369       face_area_local = 0.0_wp
     2370       IF ( bc_dirichlet_l )  THEN
     2371          i = 0
     2372          DO  j = nys, nyn
     2373             sub_sum = 0.0_wp
     2374             k_wall = get_topography_top_index_ji( j, i, 'u' )
     2375             DO   k = k_wall + 1, nzt
     2376                sub_sum = sub_sum + dzw(k)
     2377             ENDDO
     2378             face_area_local =  face_area_local + dy * sub_sum
     2379          ENDDO
     2380       ENDIF
     2381       
     2382#if defined( __parallel )
     2383       IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     2384       CALL MPI_ALLREDUCE( face_area_local, face_area(1), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     2385#else
     2386       face_area(1) = face_area_local
     2387#endif
     2388!
      2389!--    Sum up the area of the right boundary face
     2390       face_area(2) = 0.0_wp
     2391       face_area_local = 0.0_wp
     2392       IF ( bc_dirichlet_r )  THEN
     2393          i = nx
     2394          DO  j = nys, nyn
     2395             sub_sum = 0.0_wp
     2396             k_wall = get_topography_top_index_ji( j, i, 'u' )
     2397             DO   k = k_wall + 1, nzt
     2398                sub_sum = sub_sum + dzw(k)
     2399             ENDDO
     2400             face_area_local =  face_area_local + dy * sub_sum
     2401          ENDDO
     2402       ENDIF
     2403       
     2404#if defined( __parallel )
     2405       IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     2406       CALL MPI_ALLREDUCE( face_area_local, face_area(2), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     2407#else
     2408       face_area(2) = face_area_local
     2409#endif
     2410!
      2411!--    Sum up the area of the south boundary face
     2412       face_area(3) = 0.0_wp
     2413       face_area_local = 0.0_wp
     2414       IF ( bc_dirichlet_s )  THEN
     2415          j = 0
     2416          DO  i = nxl, nxr
     2417             sub_sum = 0.0_wp
     2418             k_wall = get_topography_top_index_ji( j, i, 'v' )
     2419             DO  k = k_wall + 1, nzt
     2420                sub_sum = sub_sum + dzw(k)
     2421             ENDDO
     2422             face_area_local = face_area_local + dx * sub_sum
     2423          ENDDO
     2424       ENDIF
     2425       
     2426#if defined( __parallel )
     2427       IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     2428       CALL MPI_ALLREDUCE( face_area_local, face_area(3), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     2429#else
     2430       face_area(3) = face_area_local
     2431#endif
     2432!
      2433!--    Sum up the area of the north boundary face
     2434       face_area(4) = 0.0_wp
     2435       face_area_local = 0.0_wp
     2436       IF ( bc_dirichlet_n )  THEN
     2437          j = ny
     2438          DO  i = nxl, nxr
     2439             sub_sum = 0.0_wp
     2440             k_wall = get_topography_top_index_ji( j, i, 'v' )
     2441             DO  k = k_wall + 1, nzt
     2442                sub_sum = sub_sum + dzw(k)
     2443             ENDDO
     2444             face_area_local = face_area_local + dx * sub_sum
     2445          ENDDO
     2446       ENDIF
     2447       
     2448#if defined( __parallel )
     2449       IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     2450       CALL MPI_ALLREDUCE( face_area_local, face_area(4), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     2451#else
     2452       face_area(4) = face_area_local
     2453#endif
     2454!
     2455!--    The top face area does not depend on the topography at all.       
     2456       face_area(5) = ( nx + 1 ) * ( ny + 1 ) * dx * dy
     2457!
     2458!--    The 6th element is used for the total area
     2459       face_area(6) = 0.0_wp
     2460       DO  n = 1, 5
     2461          face_area(6) = face_area(6) + face_area(n)
     2462       ENDDO
     2463
     2464       write( 9, "(6(e12.5,2x))") ( face_area(n), n = 1, 6 )
     2465       flush( 9 )
     2466       
     2467    END SUBROUTINE pmci_compute_face_areas
     2468#endif
     2469   
    23342470 END SUBROUTINE pmci_setup_child
    23352471
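
In summary, pmci_compute_face_areas evaluates, once during child setup, the open (above-topography) area of every nest-boundary face and the grand total. Written out with the code's own names, the loops above amount to

    face_area(1) = \sum_{j=0}^{ny} dy \sum_{k=k_wall(j)+1}^{nzt} dzw(k)     (left face; right, south and north analogously)
    face_area(5) = ( nx + 1 ) * ( ny + 1 ) * dx * dy                        (top face, independent of topography)
    face_area(6) = \sum_{n=1}^{5} face_area(n)                              (total boundary area)

where k_wall(j) is the local topography-top index returned by get_topography_top_index_ji. These areas are reused by the mass-conservation corrections below.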
     
    39324068      REAL(wp) ::  c_interp_1   !< Value interpolated to the flux point in x direction from the parent-grid data
    39334069      REAL(wp) ::  c_interp_2   !< Auxiliary value interpolated  to the flux point in x direction from the parent-grid data
    3934 
    39354070!
    39364071!--   Check which edge is to be handled
     
    41544289      INTEGER(iwp) ::  jcbc     !< Fixed child-grid index in the y-direction pointing to the boundary-value nodes
    41554290      INTEGER(iwp) ::  jcbgp    !< Index running over the redundant boundary ghost points in y-direction
    4156       INTEGER(iwp) ::  kc       !< Running child-grid index in the z-direction     
    4157       INTEGER(iwp) ::  kp       !< Running parent-grid index in the z-direction
    41584291      INTEGER(iwp) ::  jpbeg    !< Parent-grid index in the y-direction pointing to the starting point of workarr_sn
    41594292                                !< in the parent-grid array
     
    41624295      INTEGER(iwp) ::  jpwp     !< Reduced parent-grid index in the y-direction for workarr_sn pointing to
    41634296                                !< the first prognostic node
     4297      INTEGER(iwp) ::  kc       !< Running child-grid index in the z-direction     
     4298      INTEGER(iwp) ::  kp       !< Running parent-grid index in the z-direction
    41644299      REAL(wp) ::  cb           !< Interpolation coefficient for the boundary ghost node 
    41654300      REAL(wp) ::  cp           !< Interpolation coefficient for the first prognostic node
    41664301      REAL(wp) ::  c_interp_1   !< Value interpolated to the flux point in x direction from the parent-grid data
    41674302      REAL(wp) ::  c_interp_2   !< Auxiliary value interpolated  to the flux point in x direction from the parent-grid data
     4303
    41684304     
    41694305!
     
    49735109#endif
    49745110 END SUBROUTINE pmci_boundary_conds
    4975    
     5111
     5112
     5113 
     5114 SUBROUTINE pmci_ensure_nest_mass_conservation
     5115
     5116!
     5117!-- Adjust the volume-flow rate through the nested boundaries so that the net volume
     5118!-- flow through all boundaries of the current nest domain becomes zero.
     5119    IMPLICIT NONE
     5120
     5121    INTEGER(iwp) ::  i                        !< Running index in the x-direction
     5122    INTEGER(iwp) ::  ierr                     !< MPI error code
     5123    INTEGER(iwp) ::  j                        !< Running index in the y-direction
     5124    INTEGER(iwp) ::  k                        !< Running index in the z-direction
     5125    INTEGER(iwp) ::  n                        !< Running index over the boundary faces: l, r, s, n and t
     5126
     5127    REAL(wp) ::  dxdy                         !< Surface area of grid cell top face
     5128    REAL(wp) ::  innor                        !< Inner normal vector of the grid cell face
      5129    REAL(wp) ::  sub_sum                      !< Intermediate sum for reducing the loss of significant digits in 2-D summations
     5130    REAL(wp) ::  u_corr_left                  !< Correction added to the left boundary value of u
     5131    REAL(wp) ::  u_corr_right                 !< Correction added to the right boundary value of u
     5132    REAL(wp) ::  v_corr_south                 !< Correction added to the south boundary value of v
     5133    REAL(wp) ::  v_corr_north                 !< Correction added to the north boundary value of v
     5134    REAL(wp) ::  volume_flux_integral         !< Surface integral of volume flux over the domain boundaries
     5135    REAL(wp) ::  volume_flux_local            !< Surface integral of volume flux over the subdomain boundary face
     5136    REAL(wp) ::  w_corr_top                   !< Correction added to the top boundary value of w
     5137
     5138    REAL(wp), DIMENSION(5) ::  volume_flux    !< Surface integral of volume flux over each boundary face of the domain
     5139
     5140   
     5141!
     5142!-- Sum up the volume flow through the left boundary
     5143    volume_flux(1) = 0.0_wp
     5144    volume_flux_local = 0.0_wp
     5145    IF ( bc_dirichlet_l )  THEN
     5146       i = 0
     5147       innor = dy
     5148       DO   j = nys, nyn
     5149          sub_sum = 0.0_wp
     5150          DO   k = nzb+1, nzt
     5151             sub_sum = sub_sum + innor * u(k,j,i) * dzw(k)                                          &
     5152                               * MERGE( 1.0_wp, 0.0_wp, BTEST( wall_flags_0(k,j,i), 1 ) )
     5153          ENDDO
     5154          volume_flux_local = volume_flux_local + sub_sum
     5155       ENDDO
     5156    ENDIF
     5157
     5158#if defined( __parallel )
     5159    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5160    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux(1), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5161#else
     5162    volume_flux(1) = volume_flux_local
     5163#endif
     5164!
     5165!-- Sum up the volume flow through the right boundary
     5166    volume_flux(2) = 0.0_wp
     5167    volume_flux_local = 0.0_wp
     5168    IF ( bc_dirichlet_r )  THEN
     5169       i = nx + 1
     5170       innor = -dy
     5171       DO   j = nys, nyn
     5172          sub_sum = 0.0_wp
     5173          DO   k = nzb+1, nzt
     5174             sub_sum = sub_sum + innor * u(k,j,i) * dzw(k)                                          &
     5175                               * MERGE( 1.0_wp, 0.0_wp, BTEST( wall_flags_0(k,j,i), 1 ) )
     5176          ENDDO
     5177          volume_flux_local = volume_flux_local + sub_sum
     5178       ENDDO
     5179    ENDIF
     5180
     5181#if defined( __parallel )
     5182    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5183    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux(2), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5184#else
     5185    volume_flux(2) = volume_flux_local
     5186#endif
     5187!
     5188!-- Sum up the volume flow through the south boundary
     5189    volume_flux(3) = 0.0_wp   
     5190    volume_flux_local = 0.0_wp
     5191    IF ( bc_dirichlet_s )  THEN
     5192       j = 0
     5193       innor = dx
     5194       DO   i = nxl, nxr
     5195          sub_sum = 0.0_wp
     5196          DO   k = nzb+1, nzt
     5197             sub_sum = sub_sum + innor * v(k,j,i) * dzw(k)                                          &
     5198                               * MERGE( 1.0_wp, 0.0_wp, BTEST( wall_flags_0(k,j,i), 2 ) )
     5199          ENDDO
     5200          volume_flux_local = volume_flux_local + sub_sum
     5201       ENDDO
     5202    ENDIF
     5203   
     5204#if defined( __parallel )
     5205    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5206    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux(3), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5207#else
     5208    volume_flux(3) = volume_flux_local
     5209#endif
     5210!   
     5211!-- Sum up the volume flow through the north boundary
     5212    volume_flux(4) = 0.0_wp
     5213    volume_flux_local = 0.0_wp
     5214    IF ( bc_dirichlet_n )  THEN
     5215       j = ny + 1
     5216       innor = -dx
     5217       DO  i = nxl, nxr
     5218          sub_sum = 0.0_wp
     5219          DO  k = nzb+1, nzt
     5220             sub_sum = sub_sum + innor * v(k,j,i) * dzw(k)                                          &
     5221                               * MERGE( 1.0_wp, 0.0_wp, BTEST( wall_flags_0(k,j,i), 2 ) )
     5222          ENDDO
     5223          volume_flux_local = volume_flux_local + sub_sum
     5224       ENDDO
     5225    ENDIF
     5226
     5227#if defined( __parallel )
     5228    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5229    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux(4), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5230#else
     5231    volume_flux(4) = volume_flux_local
     5232#endif
     5233!
     5234!-- Sum up the volume flow through the top boundary
     5235    volume_flux(5) = 0.0_wp
     5236    volume_flux_local = 0.0_wp
     5237    dxdy = dx * dy
     5238    k = nzt
     5239    DO  i = nxl, nxr
     5240       sub_sum = 0.0_wp
     5241       DO   j = nys, nyn
     5242          sub_sum = sub_sum - w(k,j,i) * dxdy  ! Minus, because the inner unit normal vector is (0,0,-1)
     5243       ENDDO
     5244       volume_flux_local = volume_flux_local + sub_sum
     5245    ENDDO
     5246
     5247#if defined( __parallel )
     5248    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5249    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux(5), 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5250#else
     5251    volume_flux(5) = volume_flux_local
     5252#endif
     5253
     5254    volume_flux_integral = 0.0_wp
     5255    DO  n = 1, 5
     5256       volume_flux_integral = volume_flux_integral + volume_flux(n)
     5257    ENDDO
     5258!   
      5259!-- The correction is distributed equally over all nest boundaries, hence the total boundary area
      5260!-- must be used. Note that face_area(6) is the total area (= sum of elements 1 to 5).
     5261    w_corr_top   = volume_flux_integral / face_area(6)
     5262    u_corr_left  =-w_corr_top
     5263    u_corr_right = w_corr_top
     5264    v_corr_south =-w_corr_top
     5265    v_corr_north = w_corr_top
     5266!!
     5267!!-- Just print out the net volume fluxes through each boundary. Only the root process prints.   
     5268!    if ( myid == 0 )  then       
     5269!       write( 9, "(5(e14.7,2x),4x,e14.7,4x,e12.5,4x,5(e14.7,2x))" )                                 &
     5270!            volume_flux(1), volume_flux(2), volume_flux(3), volume_flux(4), volume_flux(5),         &
     5271!            volume_flux_integral, c_correc,                                                         &
     5272!            u_corr_left, u_corr_right,  v_corr_south, v_corr_north, w_corr_top
     5273!       flush( 9 )
     5274!    endif   
     5275!
     5276!-- Correct the top-boundary value of w
     5277    DO   i = nxl, nxr
     5278       DO   j = nys, nyn
     5279          DO  k = nzt, nzt + 1
     5280             w(k,j,i) = w(k,j,i) + w_corr_top
     5281          ENDDO
     5282       ENDDO
     5283    ENDDO
     5284!
     5285!-- Correct the left-boundary value of u
     5286    IF ( bc_dirichlet_l )  THEN
     5287       DO  i = nxlg, nxl
     5288          DO  j = nys, nyn
     5289             DO  k = nzb + 1, nzt
     5290                u(k,j,i) = u(k,j,i) + u_corr_left
     5291             ENDDO
     5292          ENDDO
     5293       ENDDO
     5294    ENDIF
     5295!
     5296!-- Correct the right-boundary value of u
     5297    IF ( bc_dirichlet_r )  THEN
     5298       DO  i = nxr+1, nxrg
     5299          DO  j = nys, nyn
     5300             DO  k = nzb + 1, nzt
     5301                u(k,j,i) = u(k,j,i) + u_corr_right
     5302             ENDDO
     5303          ENDDO
     5304       ENDDO
     5305    ENDIF
     5306!
     5307!-- Correct the south-boundary value of v
     5308    IF ( bc_dirichlet_s )  THEN
     5309       DO  i = nxl, nxr
     5310          DO  j = nysg, nys
     5311             DO  k = nzb + 1, nzt
     5312                v(k,j,i) = v(k,j,i) + v_corr_south
     5313             ENDDO
     5314          ENDDO
     5315       ENDDO
     5316    ENDIF
     5317!
     5318!-- Correct the north-boundary value of v
     5319    IF ( bc_dirichlet_n )  THEN
     5320       DO  i = nxl, nxr
     5321          DO  j = nyn+1, nyng
     5322             DO  k = nzb + 1, nzt
     5323                v(k,j,i) = v(k,j,i) + v_corr_north
     5324             ENDDO
     5325          ENDDO
     5326       ENDDO
     5327    ENDIF
     5328   
     5329   
     5330 END SUBROUTINE pmci_ensure_nest_mass_conservation
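
The correction above works as follows. Each volume_flux(n) is the flux integral over one boundary face taken with the inner normal, so volume_flux_integral is the net inflow into the child domain. Dividing it by the total open boundary area and adding the result as an outward-directed velocity on every face removes exactly that surplus:

    w_corr_top   = ( \sum_{n=1}^{5} volume_flux(n) ) / face_area(6)
    u_corr_left  = -w_corr_top        u_corr_right = +w_corr_top
    v_corr_south = -w_corr_top        v_corr_north = +w_corr_top

Each face then carries an additional outflow of face_area(n) * w_corr_top, and these contributions add up to volume_flux_integral, so the corrected net boundary volume flux vanishes up to round-off.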
     5331
     5332
     5333 
     5334 SUBROUTINE pmci_ensure_nest_mass_conservation_vertical
     5335
     5336!
     5337!-- Adjust the volume-flow rate through the top boundary so that the net volume
     5338!-- flow through all boundaries of the current nest domain becomes zero.
     5339    IMPLICIT NONE
     5340
     5341    INTEGER(iwp) ::  i                        !< Running index in the x-direction
     5342    INTEGER(iwp) ::  ierr                     !< MPI error code
     5343    INTEGER(iwp) ::  j                        !< Running index in the y-direction
     5344    INTEGER(iwp) ::  k                        !< Running index in the z-direction
     5345
     5346    REAL(wp) ::  dxdy                         !< Surface area of grid cell top face
     5347    REAL(wp) ::  innor                        !< Inner normal vector of the grid cell face
      5348    REAL(wp) ::  sub_sum                      !< Intermediate sum for reducing the loss of significant digits in 2-D summations
     5349    REAL(wp) ::  top_area                     !< Top boundary face area
     5350    REAL(wp) ::  volume_flux                  !< Surface integral of volume flux over the top boundary face
     5351    REAL(wp) ::  volume_flux_local            !< Surface integral of volume flux over the subdomain boundary face
     5352    REAL(wp) ::  w_corr_top                   !< Correction added to the top boundary value of w
     5353
     5354!   
      5355!-- The computation of the areas will be moved to the initialization phase and done properly there.
     5356    top_area = face_area(5)
     5357!
     5358!-- Sum up the volume flow through the top boundary
     5359    volume_flux = 0.0_wp
     5360    volume_flux_local = 0.0_wp
     5361    dxdy = dx * dy
     5362    k = nzt
     5363    DO  i = nxl, nxr
     5364       sub_sum = 0.0_wp
     5365       DO   j = nys, nyn
     5366          sub_sum = sub_sum - w(k,j,i) * dxdy  ! Minus, because the inner unit normal vector is (0,0,-1)
     5367       ENDDO
     5368       volume_flux_local = volume_flux_local + sub_sum
     5369    ENDDO
     5370
     5371#if defined( __parallel )
     5372    IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
     5373    CALL MPI_ALLREDUCE( volume_flux_local, volume_flux, 1, MPI_REAL, MPI_SUM, comm2d, ierr )
     5374#else
     5375    volume_flux = volume_flux_local
     5376#endif
     5377
     5378    w_corr_top   = volume_flux / top_area
     5379!!
     5380!!-- Just print out the net volume fluxes through each boundary. Only the root process prints.   
     5381!    if ( myid == 0 )  then       
     5382!       write( 9, "(5(e14.7,2x),4x,e14.7,4x,e12.5,4x,5(e14.7,2x))" )                                 &
     5383!            volume_flux(1), volume_flux(2), volume_flux(3), volume_flux(4), volume_flux(5),         &
     5384!            volume_flux_integral, c_correc,                                                         &
     5385!            u_corr_left, u_corr_right,  v_corr_south, v_corr_north, w_corr_top
     5386!       flush( 9 )
     5387!    endif   
     5388!
     5389!-- Correct the top-boundary value of w
     5390    DO   i = nxl, nxr
     5391       DO   j = nys, nyn
     5392          DO  k = nzt, nzt + 1
     5393             w(k,j,i) = w(k,j,i) + w_corr_top
     5394          ENDDO
     5395       ENDDO
     5396    ENDDO
     5397   
     5398 END SUBROUTINE pmci_ensure_nest_mass_conservation_vertical
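
In the purely vertical nesting mode only the top face is a nest boundary (the lateral boundaries are treated cyclically), so the same principle reduces to a correction of w alone:

    w_corr_top = volume_flux / face_area(5),   volume_flux = -\sum_{i,j} w(nzt,j,i) dx dy

i.e. the net inflow through the top boundary divided by the top-face area, added to w at k = nzt and nzt + 1.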
     5399
     5400 
    49765401END MODULE pmc_interface
  • palm/trunk/SOURCE/time_integration.f90

    r3994 r4010  
    2525! -----------------
    2626! $Id$
     27! Mass (volume) flux correction included to ensure global mass conservation for child domains.
     28!
     29! 3994 2019-05-22 18:08:09Z suehring
    2730! output of turbulence intensity added
    2831!
     
    615618
    616619    USE pmc_interface,                                                                             &
    617         ONLY:  nested_run, nesting_mode, pmci_boundary_conds, pmci_datatrans, pmci_synchronize
     620        ONLY:  nested_run, nesting_mode, pmci_boundary_conds, pmci_datatrans, pmci_synchronize,    &
     621        pmci_ensure_nest_mass_conservation, pmci_ensure_nest_mass_conservation_vertical
    618622
    619623    USE progress_bar,                                                                              &
     
    12801284
    12811285             ELSE
    1282 
     1286!               
     1287!--             Mass (volume) flux correction to ensure global mass conservation for child domains.
     1288                IF ( child_domain )  THEN
     1289                   IF ( nesting_mode == 'vertical' )  THEN
     1290                      CALL pmci_ensure_nest_mass_conservation_vertical
     1291                   ELSE
     1292                      CALL pmci_ensure_nest_mass_conservation
     1293                   ENDIF
     1294                ENDIF
     1295               
    12831296                CALL pres
    12841297
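
Placing these calls immediately before CALL pres is consistent with the solvability requirement of the pressure solver: the Poisson equation for the perturbation pressure with Neumann boundary conditions only has a solution if the domain-integrated divergence, i.e. the net volume flux through the domain boundaries, vanishes,

    \int_\Omega \nabla\cdot\vec{u} \, dV  =  \oint_{\partial\Omega} \vec{u}\cdot\vec{n} \, dA  =  0 .

The new routines enforce exactly this condition for child domains, whose boundary values are prescribed by interpolation from the parent and are therefore not automatically mass-conserving on the child grid.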