!> @file lpm_exchange_horiz.f90
!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2015 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: lpm_exchange_horiz.f90 1784 2016-03-06 19:14:40Z knoop $
!
! 1783 2016-03-06 18:36:17Z raasch
! new netcdf-module included
!
! 1691 2015-10-26 16:17:44Z maronga
! Formatting corrections.
!
! 1685 2015-10-08 07:32:13Z raasch
! bugfix concerning vertical index offset in case of ocean
!
! 1682 2015-10-07 23:56:08Z knoop
! Code annotations made doxygen readable
!
! 1359 2014-04-11 17:15:14Z hoffmann
! New particle structure integrated.
! Kind definition added to all floating point numbers.
!
! 1327 2014-03-21 11:00:16Z raasch
! -netcdf output queries
!
! 1320 2014-03-20 08:40:49Z raasch
! ONLY-attribute added to USE-statements,
! kind-parameters added to all INTEGER and REAL declaration statements,
! kinds are defined in new module kinds,
! comment fields (!:) to be used for variable explanations added to
! all variable declaration statements
!
! 1318 2014-03-17 13:35:16Z raasch
! module interfaces removed
!
! 1036 2012-10-22 13:43:42Z raasch
! code put under GPL (PALM 3.9)
!
! 851 2012-03-15 14:32:58Z raasch
! Bugfix: resetting of particle_mask and tail mask moved from end of this
! routine to lpm
!
! 849 2012-03-15 10:35:09Z raasch
! initial revision (former part of advec_particles)
!
!
! Description:
! ------------
!> Exchange of particles (and tails) between the subdomains.
!------------------------------------------------------------------------------!
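!
!-- Naming convention used in this module: particles leaving the subdomain are
!-- collected in transfer arrays tr<d>p, where <d> denotes the neighbour they
!-- are sent to (l = left, r = right, s = south, n = north); particles arriving
!-- from a neighbour are received in rv<d>p. Tail data use the same scheme with
!-- an additional "t" (e.g. trlpt), and the corresponding *_count variables
!-- hold the number of elements stored in each of these arrays.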
 MODULE lpm_exchange_horiz_mod

    USE control_parameters,                                                    &
        ONLY:  dz, message_string, simulated_time

    USE cpulog,                                                                &
        ONLY:  cpu_log, log_point_s

    USE grid_variables,                                                        &
        ONLY:  ddx, ddy, dx, dy

    USE indices,                                                               &
        ONLY:  nx, nxl, nxr, ny, nyn, nys, nzb, nzt

    USE kinds

    USE lpm_pack_arrays_mod,                                                   &
        ONLY:  lpm_pack_arrays

    USE netcdf_interface,                                                      &
        ONLY:  netcdf_data_format

    USE particle_attributes,                                                   &
        ONLY:  alloc_factor, deleted_particles, deleted_tails, grid_particles, &
               ibc_par_lr, ibc_par_ns, maximum_number_of_tails,                &
               maximum_number_of_tailpoints, min_nr_particle,                  &
               mpi_particle_type, number_of_tails, number_of_particles,        &
               offset_ocean_nzt, particles,                                    &
               particle_tail_coordinates, particle_type, prt_count,            &
               tail_mask, trlp_count_sum,                                      &
               trlp_count_recv_sum, trnp_count_sum, trnp_count_recv_sum,       &
               trrp_count_sum, trrp_count_recv_sum, trsp_count_sum,            &
               trsp_count_recv_sum, use_particle_tails, zero_particle

    USE pegrid

    IMPLICIT NONE

    INTEGER(iwp), PARAMETER ::  NR_2_direction_move = 10000 !<
    INTEGER(iwp)            ::  nr_move_north               !<
    INTEGER(iwp)            ::  nr_move_south               !<

    TYPE(particle_type), DIMENSION(NR_2_direction_move) ::  move_also_north
    TYPE(particle_type), DIMENSION(NR_2_direction_move) ::  move_also_south

    SAVE

    PRIVATE
    PUBLIC lpm_exchange_horiz, lpm_move_particle, realloc_particles_array

    INTERFACE lpm_exchange_horiz
       MODULE PROCEDURE lpm_exchange_horiz
    END INTERFACE lpm_exchange_horiz

    INTERFACE lpm_move_particle
       MODULE PROCEDURE lpm_move_particle
    END INTERFACE lpm_move_particle

    INTERFACE realloc_particles_array
       MODULE PROCEDURE realloc_particles_array
    END INTERFACE realloc_particles_array

 CONTAINS

!------------------------------------------------------------------------------!
! Description:
! ------------
!> Exchange between subdomains.
!> As soon as one particle has moved beyond the boundary of the domain, it
!> is included in the relevant transfer arrays and marked for subsequent
!> deletion on this PE.
!> First sweep for crossings in x direction. Find out first the number of
!> particles to be transferred and allocate temporary arrays needed to store
!> them.
!> For a one-dimensional decomposition along y, no transfer is necessary,
!> because the particle remains on the PE, but the particle coordinate has to
!> be adjusted.
!------------------------------------------------------------------------------!
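!
!-- Note: the horizontal grid index of a particle is obtained from its position
!-- via i = ( x + 0.5 * dx ) * ddx (and analogously j from y). With dx = 2.0 m,
!-- for example, a particle at x = 2.9 m gives INT( 3.9 * 0.5 ) = 1, i.e. it
!-- belongs to grid box i = 1, whose centre is at x = i * dx. Because integer
!-- truncation rounds towards zero, this expression fails for positions left of
!-- x = -0.5 * dx, which is why the index is set to -1 explicitly in that case
!-- below.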
 SUBROUTINE lpm_exchange_horiz

    IMPLICIT NONE

    INTEGER(iwp) ::  i                 !<
    INTEGER(iwp) ::  ip                !<
    INTEGER(iwp) ::  j                 !<
    INTEGER(iwp) ::  jp                !<
    INTEGER(iwp) ::  k                 !<
    INTEGER(iwp) ::  kp                !<
    INTEGER(iwp) ::  n                 !<
    INTEGER(iwp) ::  nn                !<
    INTEGER(iwp) ::  tlength           !<
    INTEGER(iwp) ::  trlp_count        !<
    INTEGER(iwp) ::  trlp_count_recv   !<
    INTEGER(iwp) ::  trlpt_count       !<
    INTEGER(iwp) ::  trlpt_count_recv  !<
    INTEGER(iwp) ::  trnp_count        !<
    INTEGER(iwp) ::  trnp_count_recv   !<
    INTEGER(iwp) ::  trnpt_count       !<
    INTEGER(iwp) ::  trnpt_count_recv  !<
    INTEGER(iwp) ::  trrp_count        !<
    INTEGER(iwp) ::  trrp_count_recv   !<
    INTEGER(iwp) ::  trrpt_count       !<
    INTEGER(iwp) ::  trrpt_count_recv  !<
    INTEGER(iwp) ::  trsp_count        !<
    INTEGER(iwp) ::  trsp_count_recv   !<
    INTEGER(iwp) ::  trspt_count       !<
    INTEGER(iwp) ::  trspt_count_recv  !<

    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  trlpt  !<
    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  trnpt  !<
    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  trrpt  !<
    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  trspt  !<

    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  rvlp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  rvnp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  rvrp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  rvsp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trlp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trnp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trrp  !<
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trsp  !<

    CALL cpu_log( log_point_s(23), 'lpm_exchange_horiz', 'start' )

#if defined( __parallel )

    trlp_count  = 0
    trlpt_count = 0
    trrp_count  = 0
    trrpt_count = 0

    trlp_count_recv  = 0
    trlpt_count_recv = 0
    trrp_count_recv  = 0
    trrpt_count_recv = 0

    IF ( pdims(1) /= 1 )  THEN
!
!--    First calculate the storage necessary for sending and receiving the data.
!--    Compute only first (nxl) and last (nxr) loop iteration.
       DO  ip = nxl, nxr, nxr - nxl
          DO  jp = nys, nyn
             DO  kp = nzb+1, nzt

                number_of_particles = prt_count(kp,jp,ip)
                IF ( number_of_particles <= 0 )  CYCLE
                particles => grid_particles(kp,jp,ip)%particles(1:number_of_particles)
                DO  n = 1, number_of_particles

                   IF ( particles(n)%particle_mask )  THEN
                      i = ( particles(n)%x + 0.5_wp * dx ) * ddx
!
!--                   Above calculation does not work for indices less than zero
                      IF ( particles(n)%x < -0.5_wp * dx )  i = -1

                      IF ( i < nxl )  THEN
                         trlp_count = trlp_count + 1
                         IF ( particles(n)%tail_id /= 0 )  trlpt_count = trlpt_count + 1
                      ELSEIF ( i > nxr )  THEN
                         trrp_count = trrp_count + 1
                         IF ( particles(n)%tail_id /= 0 )  trrpt_count = trrpt_count + 1
                      ENDIF
                   ENDIF
                ENDDO

             ENDDO
          ENDDO
       ENDDO

       IF ( trlp_count  == 0 )  trlp_count  = 1
       IF ( trlpt_count == 0 )  trlpt_count = 1
       IF ( trrp_count  == 0 )  trrp_count  = 1
       IF ( trrpt_count == 0 )  trrpt_count = 1

       ALLOCATE( trlp(trlp_count), trrp(trrp_count) )

       trlp = zero_particle
       trrp = zero_particle

       IF ( use_particle_tails )  THEN
          ALLOCATE( trlpt(maximum_number_of_tailpoints,5,trlpt_count),         &
                    trrpt(maximum_number_of_tailpoints,5,trrpt_count) )
          tlength = maximum_number_of_tailpoints * 5
       ENDIF

       trlp_count  = 0
       trlpt_count = 0
       trrp_count  = 0
       trrpt_count = 0

    ENDIF

!
!-- Compute only first (nxl) and last (nxr) loop iteration
    DO  ip = nxl, nxr, nxr-nxl
       DO  jp = nys, nyn
          DO  kp = nzb+1, nzt
             number_of_particles = prt_count(kp,jp,ip)
             IF ( number_of_particles <= 0 )  CYCLE
             particles => grid_particles(kp,jp,ip)%particles(1:number_of_particles)
             DO  n = 1, number_of_particles

                nn = particles(n)%tail_id
!
!--             Only those particles that have not been marked as 'deleted' may
!--             be moved.
                IF ( particles(n)%particle_mask )  THEN

                   i = ( particles(n)%x + 0.5_wp * dx ) * ddx
!
!-- Above calculation does not work for indices less than zero IF ( particles(n)%x < - 0.5_wp * dx ) i = -1 IF ( i < nxl ) THEN IF ( i < 0 ) THEN ! !-- Apply boundary condition along x IF ( ibc_par_lr == 0 ) THEN ! !-- Cyclic condition IF ( pdims(1) == 1 ) THEN particles(n)%x = ( nx + 1 ) * dx + particles(n)%x particles(n)%origin_x = ( nx + 1 ) * dx + & particles(n)%origin_x IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,1,nn) = ( nx + 1 ) * dx & + particle_tail_coordinates(1:i,1,nn) ENDIF ELSE trlp_count = trlp_count + 1 trlp(trlp_count) = particles(n) trlp(trlp_count)%x = ( nx + 1 ) * dx + trlp(trlp_count)%x trlp(trlp_count)%origin_x = trlp(trlp_count)%origin_x + & ( nx + 1 ) * dx particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( trlp(trlp_count)%x >= (nx + 0.5_wp)* dx - 1.0E-12_wp ) THEN trlp(trlp_count)%x = trlp(trlp_count)%x - 1.0E-10_wp !++ why is 1 subtracted in next statement??? trlp(trlp_count)%origin_x = trlp(trlp_count)%origin_x - 1 ENDIF IF ( use_particle_tails .AND. nn /= 0 ) THEN trlpt_count = trlpt_count + 1 trlpt(:,:,trlpt_count) = particle_tail_coordinates(:,:,nn) trlpt(:,1,trlpt_count) = ( nx + 1 ) * dx + & trlpt(:,1,trlpt_count) tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( ibc_par_lr == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_lr == 2 ) THEN ! !-- Particle reflection particles(n)%x = -particles(n)%x particles(n)%speed_x = -particles(n)%speed_x ENDIF ELSE ! !-- Store particle data in the transfer array, which will be !-- send to the neighbouring PE trlp_count = trlp_count + 1 trlp(trlp_count) = particles(n) particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trlpt_count = trlpt_count + 1 trlpt(:,:,trlpt_count) = particle_tail_coordinates(:,:,nn) tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( i > nxr ) THEN IF ( i > nx ) THEN ! !-- Apply boundary condition along x IF ( ibc_par_lr == 0 ) THEN ! !-- Cyclic condition IF ( pdims(1) == 1 ) THEN particles(n)%x = particles(n)%x - ( nx + 1 ) * dx particles(n)%origin_x = particles(n)%origin_x - & ( nx + 1 ) * dx IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,1,nn) = - ( nx+1 ) * dx & + particle_tail_coordinates(1:i,1,nn) ENDIF ELSE trrp_count = trrp_count + 1 trrp(trrp_count) = particles(n) trrp(trrp_count)%x = trrp(trrp_count)%x - ( nx + 1 ) * dx trrp(trrp_count)%origin_x = trrp(trrp_count)%origin_x - & ( nx + 1 ) * dx particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trrpt_count = trrpt_count + 1 trrpt(:,:,trrpt_count) = particle_tail_coordinates(:,:,nn) trrpt(:,1,trrpt_count) = trrpt(:,1,trrpt_count) - & ( nx + 1 ) * dx tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( ibc_par_lr == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_lr == 2 ) THEN ! 
!-- Particle reflection particles(n)%x = 2 * ( nx * dx ) - particles(n)%x particles(n)%speed_x = -particles(n)%speed_x ENDIF ELSE ! !-- Store particle data in the transfer array, which will be send !-- to the neighbouring PE trrp_count = trrp_count + 1 trrp(trrp_count) = particles(n) particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trrpt_count = trrpt_count + 1 trrpt(:,:,trrpt_count) = particle_tail_coordinates(:,:,nn) tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ENDIF ENDIF ENDDO ENDDO ENDDO ENDDO ! !-- Send left boundary, receive right boundary (but first exchange how many !-- and check, if particle storage must be extended) IF ( pdims(1) /= 1 ) THEN CALL MPI_SENDRECV( trlp_count, 1, MPI_INTEGER, pleft, 0, & trrp_count_recv, 1, MPI_INTEGER, pright, 0, & comm2d, status, ierr ) ALLOCATE(rvrp(MAX(1,trrp_count_recv))) CALL MPI_SENDRECV( trlp(1)%radius, max(1,trlp_count), mpi_particle_type,& pleft, 1, rvrp(1)%radius, & max(1,trrp_count_recv), mpi_particle_type, pright, 1,& comm2d, status, ierr ) IF ( trrp_count_recv > 0 ) CALL Add_particles_to_gridcell(rvrp(1:trrp_count_recv)) DEALLOCATE(rvrp) IF ( use_particle_tails ) THEN CALL MPI_SENDRECV( trlpt_count, 1, MPI_INTEGER, pleft, 0, & trrpt_count_recv, 1, MPI_INTEGER, pright, 0, & comm2d, status, ierr ) IF ( number_of_tails+trrpt_count_recv > maximum_number_of_tails ) & THEN IF ( netcdf_data_format < 3 ) THEN message_string = 'maximum_number_of_tails ' // & 'needs to be increased ' // & '&but this is not allowed wi'// & 'th netcdf_data_format < 3' CALL message( 'lpm_exch_horiz', 'PA0147', 2, 2, -1, 6, 1 ) ELSE CALL lpm_extend_tail_array( trrpt_count_recv ) ENDIF ENDIF CALL MPI_SENDRECV( trlpt(1,1,1), trlpt_count*tlength, MPI_REAL, & pleft, 1, & particle_tail_coordinates(1,1,number_of_tails+1), & trrpt_count_recv*tlength, MPI_REAL, pright, 1, & comm2d, status, ierr ) ! !-- Update the tail ids for the transferred particles nn = number_of_tails DO n = number_of_particles+1, number_of_particles+trrp_count_recv IF ( particles(n)%tail_id /= 0 ) THEN nn = nn + 1 particles(n)%tail_id = nn ENDIF ENDDO ENDIF ! !-- Send right boundary, receive left boundary CALL MPI_SENDRECV( trrp_count, 1, MPI_INTEGER, pright, 0, & trlp_count_recv, 1, MPI_INTEGER, pleft, 0, & comm2d, status, ierr ) ALLOCATE(rvlp(MAX(1,trlp_count_recv))) CALL MPI_SENDRECV( trrp(1)%radius, max(1,trrp_count), mpi_particle_type,& pright, 1, rvlp(1)%radius, & max(1,trlp_count_recv), mpi_particle_type, pleft, 1, & comm2d, status, ierr ) IF ( trlp_count_recv > 0 ) CALL Add_particles_to_gridcell(rvlp(1:trlp_count_recv)) DEALLOCATE(rvlp) IF ( use_particle_tails ) THEN CALL MPI_SENDRECV( trrpt_count, 1, MPI_INTEGER, pright, 0, & trlpt_count_recv, 1, MPI_INTEGER, pleft, 0, & comm2d, status, ierr ) IF ( number_of_tails+trlpt_count_recv > maximum_number_of_tails ) & THEN IF ( netcdf_data_format < 3 ) THEN message_string = 'maximum_number_of_tails ' // & 'needs to be increased ' // & '&but this is not allowed wi'// & 'th netcdf_data_format < 3' CALL message( 'lpm_exch_horiz', 'PA0147', 2, 2, -1, 6, 1 ) ELSE CALL lpm_extend_tail_array( trlpt_count_recv ) ENDIF ENDIF CALL MPI_SENDRECV( trrpt(1,1,1), trrpt_count*tlength, MPI_REAL, & pright, 1, & particle_tail_coordinates(1,1,number_of_tails+1), & trlpt_count_recv*tlength, MPI_REAL, pleft, 1, & comm2d, status, ierr ) ! 
!-- Update the tail ids for the transferred particles nn = number_of_tails DO n = number_of_particles+1, number_of_particles+trlp_count_recv IF ( particles(n)%tail_id /= 0 ) THEN nn = nn + 1 particles(n)%tail_id = nn ENDIF ENDDO ENDIF ! number_of_particles = number_of_particles + trlp_count_recv ! number_of_tails = number_of_tails + trlpt_count_recv IF ( use_particle_tails ) THEN DEALLOCATE( trlpt, trrpt ) ENDIF DEALLOCATE( trlp, trrp ) ENDIF ! !-- Check whether particles have crossed the boundaries in y direction. Note !-- that this case can also apply to particles that have just been received !-- from the adjacent right or left PE. !-- Find out first the number of particles to be transferred and allocate !-- temporary arrays needed to store them. !-- For a one-dimensional decomposition along x, no transfer is necessary, !-- because the particle remains on the PE. trsp_count = nr_move_south trspt_count = 0 trnp_count = nr_move_north trnpt_count = 0 trsp_count_recv = 0 trspt_count_recv = 0 trnp_count_recv = 0 trnpt_count_recv = 0 IF ( pdims(2) /= 1 ) THEN ! !-- First calculate the storage necessary for sending and receiving the !-- data DO ip = nxl, nxr DO jp = nys, nyn, nyn-nys !compute only first (nys) and last (nyn) loop iterration DO kp = nzb+1, nzt number_of_particles = prt_count(kp,jp,ip) IF ( number_of_particles <= 0 ) CYCLE particles => grid_particles(kp,jp,ip)%particles(1:number_of_particles) DO n = 1, number_of_particles IF ( particles(n)%particle_mask ) THEN j = ( particles(n)%y + 0.5_wp * dy ) * ddy ! !-- Above calculation does not work for indices less than zero IF ( particles(n)%y < -0.5_wp * dy ) j = -1 IF ( j < nys ) THEN trsp_count = trsp_count + 1 IF ( particles(n)%tail_id /= 0 ) trspt_count = trspt_count + 1 ELSEIF ( j > nyn ) THEN trnp_count = trnp_count + 1 IF ( particles(n)%tail_id /= 0 ) trnpt_count = trnpt_count + 1 ENDIF ENDIF ENDDO ENDDO ENDDO ENDDO IF ( trsp_count == 0 ) trsp_count = 1 IF ( trspt_count == 0 ) trspt_count = 1 IF ( trnp_count == 0 ) trnp_count = 1 IF ( trnpt_count == 0 ) trnpt_count = 1 ALLOCATE( trsp(trsp_count), trnp(trnp_count) ) trsp = zero_particle trnp = zero_particle IF ( use_particle_tails ) THEN ALLOCATE( trspt(maximum_number_of_tailpoints,5,trspt_count), & trnpt(maximum_number_of_tailpoints,5,trnpt_count) ) tlength = maximum_number_of_tailpoints * 5 ENDIF trsp_count = nr_move_south trspt_count = 0 trnp_count = nr_move_north trnpt_count = 0 trsp(1:nr_move_south) = move_also_south(1:nr_move_south) trnp(1:nr_move_north) = move_also_north(1:nr_move_north) ENDIF DO ip = nxl, nxr DO jp = nys, nyn, nyn-nys ! compute only first (nys) and last (nyn) loop iterration DO kp = nzb+1, nzt number_of_particles = prt_count(kp,jp,ip) IF ( number_of_particles <= 0 ) CYCLE particles => grid_particles(kp,jp,ip)%particles(1:number_of_particles) DO n = 1, number_of_particles nn = particles(n)%tail_id ! !-- Only those particles that have not been marked as 'deleted' may !-- be moved. IF ( particles(n)%particle_mask ) THEN j = ( particles(n)%y + 0.5_wp * dy ) * ddy ! !-- Above calculation does not work for indices less than zero IF ( particles(n)%y < -0.5_wp * dy ) j = -1 IF ( j < nys ) THEN IF ( j < 0 ) THEN ! !-- Apply boundary condition along y IF ( ibc_par_ns == 0 ) THEN ! !-- Cyclic condition IF ( pdims(2) == 1 ) THEN particles(n)%y = ( ny + 1 ) * dy + particles(n)%y particles(n)%origin_y = ( ny + 1 ) * dy + & particles(n)%origin_y IF ( use_particle_tails .AND. 
nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,2,nn) = & ( ny+1 ) * dy + particle_tail_coordinates(1:i,2,nn) ENDIF ELSE trsp_count = trsp_count + 1 trsp(trsp_count) = particles(n) trsp(trsp_count)%y = ( ny + 1 ) * dy + & trsp(trsp_count)%y trsp(trsp_count)%origin_y = trsp(trsp_count)%origin_y & + ( ny + 1 ) * dy particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( trsp(trsp_count)%y >= (ny+0.5_wp)* dy - 1.0E-12_wp ) THEN trsp(trsp_count)%y = trsp(trsp_count)%y - 1.0E-10_wp !++ why is 1 subtracted in next statement??? trsp(trsp_count)%origin_y = & trsp(trsp_count)%origin_y - 1 ENDIF IF ( use_particle_tails .AND. nn /= 0 ) THEN trspt_count = trspt_count + 1 trspt(:,:,trspt_count) = & particle_tail_coordinates(:,:,nn) trspt(:,2,trspt_count) = ( ny + 1 ) * dy + & trspt(:,2,trspt_count) tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( ibc_par_ns == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_ns == 2 ) THEN ! !-- Particle reflection particles(n)%y = -particles(n)%y particles(n)%speed_y = -particles(n)%speed_y ENDIF ELSE ! !-- Store particle data in the transfer array, which will !-- be send to the neighbouring PE trsp_count = trsp_count + 1 trsp(trsp_count) = particles(n) particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trspt_count = trspt_count + 1 trspt(:,:,trspt_count) = particle_tail_coordinates(:,:,nn) tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( j > nyn ) THEN IF ( j > ny ) THEN ! !-- Apply boundary condition along x IF ( ibc_par_ns == 0 ) THEN ! !-- Cyclic condition IF ( pdims(2) == 1 ) THEN particles(n)%y = particles(n)%y - ( ny + 1 ) * dy particles(n)%origin_y = & particles(n)%origin_y - ( ny + 1 ) * dy IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,2,nn) = & - (ny+1) * dy + particle_tail_coordinates(1:i,2,nn) ENDIF ELSE trnp_count = trnp_count + 1 trnp(trnp_count) = particles(n) trnp(trnp_count)%y = & trnp(trnp_count)%y - ( ny + 1 ) * dy trnp(trnp_count)%origin_y = & trnp(trnp_count)%origin_y - ( ny + 1 ) * dy particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trnpt_count = trnpt_count + 1 trnpt(:,:,trnpt_count) = & particle_tail_coordinates(:,:,nn) trnpt(:,2,trnpt_count) = & trnpt(:,2,trnpt_count) - ( ny + 1 ) * dy tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ENDIF ELSEIF ( ibc_par_ns == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_ns == 2 ) THEN ! !-- Particle reflection particles(n)%y = 2 * ( ny * dy ) - particles(n)%y particles(n)%speed_y = -particles(n)%speed_y ENDIF ELSE ! !-- Store particle data in the transfer array, which will !-- be send to the neighbouring PE trnp_count = trnp_count + 1 trnp(trnp_count) = particles(n) particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN trnpt_count = trnpt_count + 1 trnpt(:,:,trnpt_count) = particle_tail_coordinates(:,:,nn) tail_mask(nn) = .FALSE. 
deleted_tails = deleted_tails + 1 ENDIF ENDIF ENDIF ENDIF ENDDO ENDDO ENDDO ENDDO ! !-- Send front boundary, receive back boundary (but first exchange how many !-- and check, if particle storage must be extended) IF ( pdims(2) /= 1 ) THEN CALL MPI_SENDRECV( trsp_count, 1, MPI_INTEGER, psouth, 0, & trnp_count_recv, 1, MPI_INTEGER, pnorth, 0, & comm2d, status, ierr ) ALLOCATE(rvnp(MAX(1,trnp_count_recv))) CALL MPI_SENDRECV( trsp(1)%radius, trsp_count, mpi_particle_type, & psouth, 1, rvnp(1)%radius, & trnp_count_recv, mpi_particle_type, pnorth, 1, & comm2d, status, ierr ) IF ( trnp_count_recv > 0 ) CALL Add_particles_to_gridcell(rvnp(1:trnp_count_recv)) DEALLOCATE(rvnp) IF ( use_particle_tails ) THEN CALL MPI_SENDRECV( trspt_count, 1, MPI_INTEGER, psouth, 0, & trnpt_count_recv, 1, MPI_INTEGER, pnorth, 0, & comm2d, status, ierr ) IF ( number_of_tails+trnpt_count_recv > maximum_number_of_tails ) & THEN IF ( netcdf_data_format < 3 ) THEN message_string = 'maximum_number_of_tails ' // & 'needs to be increased ' // & '&but this is not allowed wi' // & 'th netcdf_data_format < 3' CALL message( 'lpm_exch_horiz', 'PA0147', 2, 2, -1, 6, 1 ) ELSE CALL lpm_extend_tail_array( trnpt_count_recv ) ENDIF ENDIF CALL MPI_SENDRECV( trspt(1,1,1), trspt_count*tlength, MPI_REAL, & psouth, 1, & particle_tail_coordinates(1,1,number_of_tails+1), & trnpt_count_recv*tlength, MPI_REAL, pnorth, 1, & comm2d, status, ierr ) ! !-- Update the tail ids for the transferred particles nn = number_of_tails DO n = number_of_particles+1, number_of_particles+trnp_count_recv IF ( particles(n)%tail_id /= 0 ) THEN nn = nn + 1 particles(n)%tail_id = nn ENDIF ENDDO ENDIF ! number_of_particles = number_of_particles + trnp_count_recv ! number_of_tails = number_of_tails + trnpt_count_recv ! !-- Send back boundary, receive front boundary CALL MPI_SENDRECV( trnp_count, 1, MPI_INTEGER, pnorth, 0, & trsp_count_recv, 1, MPI_INTEGER, psouth, 0, & comm2d, status, ierr ) ALLOCATE(rvsp(MAX(1,trsp_count_recv))) CALL MPI_SENDRECV( trnp(1)%radius, trnp_count, mpi_particle_type, & pnorth, 1, rvsp(1)%radius, & trsp_count_recv, mpi_particle_type, psouth, 1, & comm2d, status, ierr ) IF ( trsp_count_recv > 0 ) CALL Add_particles_to_gridcell(rvsp(1:trsp_count_recv)) DEALLOCATE(rvsp) IF ( use_particle_tails ) THEN CALL MPI_SENDRECV( trnpt_count, 1, MPI_INTEGER, pnorth, 0, & trspt_count_recv, 1, MPI_INTEGER, psouth, 0, & comm2d, status, ierr ) IF ( number_of_tails+trspt_count_recv > maximum_number_of_tails ) & THEN IF ( netcdf_data_format < 3 ) THEN message_string = 'maximum_number_of_tails ' // & 'needs to be increased ' // & '&but this is not allowed wi'// & 'th NetCDF output switched on' CALL message( 'lpm_exch_horiz', 'PA0147', 2, 2, -1, 6, 1 ) ELSE CALL lpm_extend_tail_array( trspt_count_recv ) ENDIF ENDIF CALL MPI_SENDRECV( trnpt(1,1,1), trnpt_count*tlength, MPI_REAL, & pnorth, 1, & particle_tail_coordinates(1,1,number_of_tails+1), & trspt_count_recv*tlength, MPI_REAL, psouth, 1, & comm2d, status, ierr ) ! !-- Update the tail ids for the transferred particles nn = number_of_tails DO n = number_of_particles+1, number_of_particles+trsp_count_recv IF ( particles(n)%tail_id /= 0 ) THEN nn = nn + 1 particles(n)%tail_id = nn ENDIF ENDDO ENDIF number_of_particles = number_of_particles + trsp_count_recv number_of_tails = number_of_tails + trspt_count_recv IF ( use_particle_tails ) THEN DEALLOCATE( trspt, trnpt ) ENDIF DEALLOCATE( trsp, trnp ) ENDIF #else ! 
!-- Apply boundary conditions DO n = 1, number_of_particles nn = particles(n)%tail_id IF ( particles(n)%x < -0.5_wp * dx ) THEN IF ( ibc_par_lr == 0 ) THEN ! !-- Cyclic boundary. Relevant coordinate has to be changed. particles(n)%x = ( nx + 1 ) * dx + particles(n)%x IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,1,nn) = ( nx + 1 ) * dx + & particle_tail_coordinates(1:i,1,nn) ENDIF ELSEIF ( ibc_par_lr == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_lr == 2 ) THEN ! !-- Particle reflection particles(n)%x = -dx - particles(n)%x particles(n)%speed_x = -particles(n)%speed_x ENDIF ELSEIF ( particles(n)%x >= ( nx + 0.5_wp ) * dx ) THEN IF ( ibc_par_lr == 0 ) THEN ! !-- Cyclic boundary. Relevant coordinate has to be changed. particles(n)%x = particles(n)%x - ( nx + 1 ) * dx IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,1,nn) = - ( nx + 1 ) * dx + & particle_tail_coordinates(1:i,1,nn) ENDIF ELSEIF ( ibc_par_lr == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_lr == 2 ) THEN ! !-- Particle reflection particles(n)%x = ( nx + 1 ) * dx - particles(n)%x particles(n)%speed_x = -particles(n)%speed_x ENDIF ENDIF IF ( particles(n)%y < -0.5_wp * dy ) THEN IF ( ibc_par_ns == 0 ) THEN ! !-- Cyclic boundary. Relevant coordinate has to be changed. particles(n)%y = ( ny + 1 ) * dy + particles(n)%y IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,2,nn) = ( ny + 1 ) * dy + & particle_tail_coordinates(1:i,2,nn) ENDIF ELSEIF ( ibc_par_ns == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_ns == 2 ) THEN ! !-- Particle reflection particles(n)%y = -dy - particles(n)%y particles(n)%speed_y = -particles(n)%speed_y ENDIF ELSEIF ( particles(n)%y >= ( ny + 0.5_wp ) * dy ) THEN IF ( ibc_par_ns == 0 ) THEN ! !-- Cyclic boundary. Relevant coordinate has to be changed. particles(n)%y = particles(n)%y - ( ny + 1 ) * dy IF ( use_particle_tails .AND. nn /= 0 ) THEN i = particles(n)%tailpoints particle_tail_coordinates(1:i,2,nn) = - ( ny + 1 ) * dy + & particle_tail_coordinates(1:i,2,nn) ENDIF ELSEIF ( ibc_par_ns == 1 ) THEN ! !-- Particle absorption particles(n)%particle_mask = .FALSE. deleted_particles = deleted_particles + 1 IF ( use_particle_tails .AND. nn /= 0 ) THEN tail_mask(nn) = .FALSE. deleted_tails = deleted_tails + 1 ENDIF ELSEIF ( ibc_par_ns == 2 ) THEN ! !-- Particle reflection particles(n)%y = ( ny + 1 ) * dy - particles(n)%y particles(n)%speed_y = -particles(n)%speed_y ENDIF ENDIF ENDDO #endif ! 
!-- Accumulate the number of particles transferred between the subdomains #if defined( __parallel ) trlp_count_sum = trlp_count_sum + trlp_count trlp_count_recv_sum = trlp_count_recv_sum + trlp_count_recv trrp_count_sum = trrp_count_sum + trrp_count trrp_count_recv_sum = trrp_count_recv_sum + trrp_count_recv trsp_count_sum = trsp_count_sum + trsp_count trsp_count_recv_sum = trsp_count_recv_sum + trsp_count_recv trnp_count_sum = trnp_count_sum + trnp_count trnp_count_recv_sum = trnp_count_recv_sum + trnp_count_recv #endif CALL cpu_log( log_point_s(23), 'lpm_exchange_horiz', 'stop' ) END SUBROUTINE lpm_exchange_horiz !------------------------------------------------------------------------------! ! Description: ! ------------ !> If a particle moves from one processor to another, this subroutine moves !> the corresponding elements from the particle arrays of the old grid cells !> to the particle arrays of the new grid cells. !------------------------------------------------------------------------------! SUBROUTINE Add_particles_to_gridcell (particle_array) IMPLICIT NONE INTEGER(iwp) :: ip !< INTEGER(iwp) :: jp !< INTEGER(iwp) :: kp !< INTEGER(iwp) :: n !< INTEGER(iwp) :: pindex !< LOGICAL :: pack_done !< TYPE(particle_type), DIMENSION(:), INTENT(IN) :: particle_array pack_done = .FALSE. nr_move_north = 0 nr_move_south = 0 DO n = 1, SIZE(particle_array) ip = ( particle_array(n)%x + 0.5_wp * dx ) * ddx jp = ( particle_array(n)%y + 0.5_wp * dy ) * ddy kp = particle_array(n)%z / dz + 1 + offset_ocean_nzt IF ( ip >= nxl .AND. ip <= nxr .AND. jp >= nys .AND. jp <= nyn & .AND. kp >= nzb+1 .AND. kp <= nzt) THEN ! particle stays on processor number_of_particles = prt_count(kp,jp,ip) particles => grid_particles(kp,jp,ip)%particles(1:number_of_particles) pindex = prt_count(kp,jp,ip)+1 IF( pindex > SIZE(grid_particles(kp,jp,ip)%particles) ) THEN IF ( pack_done ) THEN CALL realloc_particles_array (ip,jp,kp) ELSE CALL lpm_pack_arrays prt_count(kp,jp,ip) = number_of_particles pindex = prt_count(kp,jp,ip)+1 IF ( pindex > SIZE(grid_particles(kp,jp,ip)%particles) ) THEN CALL realloc_particles_array (ip,jp,kp) ENDIF pack_done = .TRUE. ENDIF ENDIF grid_particles(kp,jp,ip)%particles(pindex) = particle_array(n) prt_count(kp,jp,ip) = pindex ELSE IF ( jp == nys - 1 ) THEN nr_move_south = nr_move_south+1 move_also_south(nr_move_south) = particle_array(n) IF ( jp == -1 ) THEN move_also_south(nr_move_south)%y = & move_also_south(nr_move_south)%y + ( ny + 1 ) * dy move_also_south(nr_move_south)%origin_y = & move_also_south(nr_move_south)%origin_y + ( ny + 1 ) * dy ENDIF ELSEIF ( jp == nyn+1 ) THEN nr_move_north = nr_move_north+1 move_also_north(nr_move_north) = particle_array(n) IF ( jp == ny+1 ) THEN move_also_north(nr_move_north)%y = & move_also_north(nr_move_north)%y - ( ny + 1 ) * dy move_also_north(nr_move_north)%origin_y = & move_also_north(nr_move_north)%origin_y - ( ny + 1 ) * dy ENDIF ELSE WRITE(0,'(a,8i7)') 'particle out of range ',myid,ip,jp,kp,nxl,nxr,nys,nyn ENDIF ENDIF ENDDO RETURN END SUBROUTINE Add_particles_to_gridcell !------------------------------------------------------------------------------! ! Description: ! ------------ !> If a particle moves from one grid cell to another (on the current !> processor!), this subroutine moves the corresponding element from the !> particle array of the old grid cell to the particle array of the new grid !> cell. !------------------------------------------------------------------------------! 
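!
!-- Implementation note: the loop over grid cells below works in two passes.
!-- The first pass copies particles into their new cells until a target cell
!-- whose particle array is already full is encountered; lpm_pack_arrays is
!-- then called to compact the arrays, and the remaining particles (from index
!-- n_start onwards) are handled in a second pass, which reallocates the target
!-- array via realloc_particles_array wherever it is still too small.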
SUBROUTINE lpm_move_particle IMPLICIT NONE INTEGER(iwp) :: i !< INTEGER(iwp) :: ip !< INTEGER(iwp) :: j !< INTEGER(iwp) :: jp !< INTEGER(iwp) :: k !< INTEGER(iwp) :: kp !< INTEGER(iwp) :: n !< INTEGER(iwp) :: np_old_cell !< INTEGER(iwp) :: n_start !< INTEGER(iwp) :: pindex !< LOGICAL :: pack_done !< TYPE(particle_type), DIMENSION(:), POINTER :: particles_old_cell !< CALL cpu_log( log_point_s(41), 'lpm_move_particle', 'start' ) DO ip = nxl, nxr DO jp = nys, nyn DO kp = nzb+1, nzt np_old_cell = prt_count(kp,jp,ip) IF ( np_old_cell <= 0 ) CYCLE particles_old_cell => grid_particles(kp,jp,ip)%particles(1:np_old_cell) n_start = -1 DO n = 1, np_old_cell i = ( particles_old_cell(n)%x + 0.5_wp * dx ) * ddx j = ( particles_old_cell(n)%y + 0.5_wp * dy ) * ddy k = particles_old_cell(n)%z / dz + 1 + offset_ocean_nzt ! !-- Check, if particle has moved to another grid cell. IF ( i /= ip .OR. j /= jp .OR. k /= kp ) THEN ! !-- The particle has moved to another grid cell. Now check, if !-- particle stays on the same processor. IF ( i >= nxl .AND. i <= nxr .AND. j >= nys .AND. & j <= nyn .AND. k >= nzb+1 .AND. k <= nzt) THEN ! !-- If the particle stays on the same processor, the particle !-- will be added to the particle array of the new processor. number_of_particles = prt_count(k,j,i) particles => grid_particles(k,j,i)%particles(1:number_of_particles) pindex = prt_count(k,j,i)+1 IF ( pindex > SIZE(grid_particles(k,j,i)%particles) ) & THEN n_start = n EXIT ENDIF grid_particles(k,j,i)%particles(pindex) = particles_old_cell(n) prt_count(k,j,i) = pindex particles_old_cell(n)%particle_mask = .FALSE. ENDIF ENDIF ENDDO IF ( n_start >= 0 ) THEN pack_done = .FALSE. DO n = n_start, np_old_cell i = ( particles_old_cell(n)%x + 0.5_wp * dx ) * ddx j = ( particles_old_cell(n)%y + 0.5_wp * dy ) * ddy k = particles_old_cell(n)%z / dz + 1 + offset_ocean_nzt IF ( i /= ip .OR. j /= jp .OR. k /= kp ) THEN ! !-- Particle is in different box IF ( i >= nxl .AND. i <= nxr .AND. j >= nys .AND. & j <= nyn .AND. k >= nzb+1 .AND. k <= nzt) THEN ! !-- Particle stays on processor number_of_particles = prt_count(k,j,i) particles => grid_particles(k,j,i)%particles(1:number_of_particles) pindex = prt_count(k,j,i)+1 IF ( pindex > SIZE(grid_particles(k,j,i)%particles) ) & THEN IF ( pack_done ) THEN CALL realloc_particles_array(i,j,k) pindex = prt_count(k,j,i)+1 ELSE CALL lpm_pack_arrays prt_count(k,j,i) = number_of_particles pindex = prt_count(k,j,i)+1 ! !-- If number of particles in the new grid box !-- exceeds its allocated memory, the particle array !-- will be reallocated IF ( pindex > SIZE(grid_particles(k,j,i)%particles) ) THEN CALL realloc_particles_array(i,j,k) pindex = prt_count(k,j,i)+1 ENDIF pack_done = .TRUE. ENDIF ENDIF grid_particles(k,j,i)%particles(pindex) = particles_old_cell(n) prt_count(k,j,i) = pindex particles_old_cell(n)%particle_mask = .FALSE. ENDIF ENDIF ENDDO ENDIF ENDDO ENDDO ENDDO CALL cpu_log( log_point_s(41), 'lpm_move_particle', 'stop' ) RETURN END SUBROUTINE lpm_move_particle !------------------------------------------------------------------------------! ! Description: ! ------------ !> @todo Missing subroutine description. !------------------------------------------------------------------------------! 
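!
!-- Note: this routine enlarges the particle array of grid cell (k,j,i) to
!-- new_size, which is either the optional argument size_in or the old size
!-- increased by alloc_factor percent, but never smaller than min_nr_particle.
!-- Existing particles are copied over (using a fixed stack buffer for arrays
!-- of up to 500 elements, an allocatable temporary otherwise) and the newly
!-- created elements are initialised with zero_particle.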
 SUBROUTINE realloc_particles_array ( i, j, k, size_in )

    IMPLICIT NONE

    INTEGER(iwp), INTENT(in)           ::  i        !<
    INTEGER(iwp), INTENT(in)           ::  j        !<
    INTEGER(iwp), INTENT(in)           ::  k        !<
    INTEGER(iwp), INTENT(in), OPTIONAL ::  size_in  !<

    INTEGER(iwp) ::  old_size  !<
    INTEGER(iwp) ::  new_size  !<

    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  tmp_particles_d  !<
    TYPE(particle_type), DIMENSION(500)            ::  tmp_particles_s  !<

    old_size = SIZE(grid_particles(k,j,i)%particles)

    IF ( PRESENT(size_in) )  THEN
       new_size = size_in
    ELSE
       new_size = old_size * ( 1.0 + alloc_factor / 100.0 )
    ENDIF

    new_size = MAX( new_size, min_nr_particle )

    IF ( old_size <= 500 )  THEN

       tmp_particles_s(1:old_size) = grid_particles(k,j,i)%particles(1:old_size)

       DEALLOCATE(grid_particles(k,j,i)%particles)
       ALLOCATE(grid_particles(k,j,i)%particles(new_size))

       grid_particles(k,j,i)%particles(1:old_size)          = tmp_particles_s(1:old_size)
       grid_particles(k,j,i)%particles(old_size+1:new_size) = zero_particle

    ELSE

       ALLOCATE(tmp_particles_d(new_size))
       tmp_particles_d(1:old_size) = grid_particles(k,j,i)%particles

       DEALLOCATE(grid_particles(k,j,i)%particles)
       ALLOCATE(grid_particles(k,j,i)%particles(new_size))

       grid_particles(k,j,i)%particles(1:old_size)          = tmp_particles_d(1:old_size)
       grid_particles(k,j,i)%particles(old_size+1:new_size) = zero_particle

       DEALLOCATE(tmp_particles_d)

    ENDIF
    particles => grid_particles(k,j,i)%particles(1:number_of_particles)

    RETURN
 END SUBROUTINE realloc_particles_array

 END MODULE lpm_exchange_horiz_mod
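!
!-- Usage note (sketch only): the PUBLIC procedures of this module are intended
!-- to be called from the particle driver lpm once the particle positions have
!-- been advanced, i.e. lpm_exchange_horiz for transfers across subdomain
!-- boundaries and lpm_move_particle for moves between grid cells of the same
!-- PE. Particles marked for deletion here are removed later by
!-- lpm_pack_arrays; resetting of particle_mask and tail_mask is done in lpm
!-- (see revision 851 above). The exact calling sequence is defined in lpm.f90.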