- Timestamp: Apr 13, 2018 11:22:08 AM
- Location: palm/trunk/SOURCE
- Files: 9 edited
Legend (plain-text rendering of the changeset viewer's colour coding):
- Unmodified lines carry no prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
- Omitted context is marked with ...
palm/trunk/SOURCE/data_output_3d.f90
r2817 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2817 2018-02-19 16:32:21Z knoop
    ! Preliminary gust module interface implemented
    !
    ...
    !-- For netCDF4/HDF5 output, data is written in parallel into one file.
    IF ( netcdf_data_format < 5 )  THEN
+   #if defined( __parallel )
       CALL check_open( 30 )
+   #endif
       IF ( myid == 0 )  CALL check_open( 106+av*10 )
    ELSE
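The change above follows the pattern used throughout this changeset: statements that only make sense in an MPI build are wrapped in __parallel preprocessor guards, so the same source still compiles and runs serially. Below is a minimal stand-alone sketch of that pattern (illustration only, not PALM code; the program and variable names are made up).

! Minimal sketch of the __parallel guard pattern (illustration only, not PALM code).
! Build the parallel variant with an MPI wrapper, e.g. "mpif90 -cpp -D__parallel guard_sketch.f90";
! a plain "gfortran -cpp guard_sketch.f90" gives the serial variant.
PROGRAM guard_sketch

#if defined( __parallel )
    USE MPI
#endif

    IMPLICIT NONE

    INTEGER ::  ierr       !< MPI error code (unused in serial builds)
    INTEGER ::  myid = 0   !< process rank; stays 0 in a serial build

#if defined( __parallel )
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
#endif

!
!-- Only rank 0 performs the serial part of the work, as in the guarded code above
    IF ( myid == 0 )  PRINT *, 'rank 0 does the serial part of the output'

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

END PROGRAM guard_sketch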
palm/trunk/SOURCE/header.f90
r2883 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2883 2018-03-14 08:29:10Z Giersch
    ! Format of the output of dt_dopr_listing (325) has been changed
    !
    ...
        ONLY: pcm_header

+   #if defined( __parallel )
    USE pmc_handle_communicator, &
        ONLY: pmc_get_model_info
+   #endif

    USE pmc_interface, &
    ...
    IF ( nested_run )  THEN

+   #if defined( __parallel )
       WRITE ( io, 600 )  TRIM( nesting_mode ), &
                          TRIM( nesting_datatransfer_mode )
    ...
                          TRIM( cpl_name )
       ENDDO
+   #endif
+
    ENDIF
    WRITE ( io, 99 )
palm/trunk/SOURCE/lpm_init.f90
r2954 → r2967

    ! -----------------
    ! $Id$
+   ! nesting routine is only called if nesting is switched on
+   !
+   ! 2954 2018-04-09 14:35:46Z schwenkel
    ! Bugfix for particle initialization in case of ocean
    !
    ...
        ONLY: init_kernels

+   USE pmc_interface, &
+       ONLY: nested_run
+
    IMPLICIT NONE

    ...
    ENDIF

-   CALL pmcp_g_init
+   IF ( nested_run )  CALL pmcp_g_init

    !
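In contrast to the compile-time guards elsewhere in this changeset, the change above is a runtime guard: pmcp_g_init is only called when nesting is actually switched on. A small sketch of that idea (illustration only, not PALM code; the routine names are made up):

! Minimal sketch of a runtime guard (illustration only, not PALM code): the
! compile-time cpp guard removes code from serial builds, whereas a logical
! flag such as nested_run merely skips it in runs that do not need it.
SUBROUTINE init_particles_sketch( nested_run )

    IMPLICIT NONE

    LOGICAL, INTENT(IN) ::  nested_run   !< .TRUE. if the run uses nested domains

!
!-- Nesting-specific setup is only executed when the run is actually nested
    IF ( nested_run )  CALL init_particle_coupling_sketch

 CONTAINS

    SUBROUTINE init_particle_coupling_sketch
!
!--    Stand-in for the coupling setup done by the real nesting routine
       PRINT *, 'particle coupling initialized'
    END SUBROUTINE init_particle_coupling_sketch

END SUBROUTINE init_particles_sketch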
palm/trunk/SOURCE/lpm_write_exchange_statistics.f90
r2841 → r2967

    ! -----------------
    ! $Id$
+   ! nesting routine is only called if nesting is switched on
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2841 2018-02-27 15:02:57Z knoop
    ! Bugfix: wrong placement of include 'mpif.h' corrected,
    ! kinds module added and pegrid module scope restricted
    ...
 SUBROUTINE lpm_write_exchange_statistics

-   #if !defined( __mpifh )
+   #if defined( __parallel )  &&  !defined( __mpifh )
    USE MPI
    #endif
    ...
        ONLY: comm2d, ierr, pleft, pright, psouth, pnorth

+   USE pmc_interface, &
+       ONLY: nested_run
+
    IMPLICIT NONE

-   #if defined( __mpifh )
+   #if defined( __parallel )  &&  defined( __mpifh )
    INCLUDE "mpif.h"
    #endif
    ...

    IF ( number_of_particles > 0 ) THEN
-      WRITE(9,*) 'number_of_particles ', number_of_particles, current_timestep_number + 1, simulated_time + dt_3d
+      WRITE(9,*) 'number_of_particles ', number_of_particles, &
+                 current_timestep_number + 1, simulated_time + dt_3d
    ENDIF

    #if defined( __parallel )
-   CALL MPI_ALLREDUCE( number_of_particles, tot_number_of_particles, 1, MPI_INTEGER,&
-                       MPI_SUM, comm2d, ierr)
+   CALL MPI_ALLREDUCE( number_of_particles, tot_number_of_particles, 1, &
+                       MPI_INTEGER, MPI_SUM, comm2d, ierr )
    #else
    tot_number_of_particles = number_of_particles
    #endif

-   CALL pmcp_g_print_number_of_particles (simulated_time+dt_3d, tot_number_of_particles)
+   IF ( nested_run ) THEN
+      CALL pmcp_g_print_number_of_particles( simulated_time+dt_3d, &
+                                             tot_number_of_particles)
+   ENDIF

    !
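The hunk above also shows how the MPI Fortran interface is selected in these files: the MPI module is used by default, the __mpifh switch falls back to the legacy include file, and neither is referenced in a build without __parallel. Below is a minimal sketch of that selection together with a serial fallback for the reduction (illustration only, not PALM code; the routine name is made up and MPI_COMM_WORLD stands in for PALM's comm2d communicator).

! Minimal sketch of MPI interface selection plus a serial fallback
! (illustration only, not PALM code).
SUBROUTINE count_items_sketch( local_count, total_count )

#if defined( __parallel )  &&  !defined( __mpifh )
    USE MPI
#endif

    IMPLICIT NONE

#if defined( __parallel )  &&  defined( __mpifh )
    INCLUDE "mpif.h"
#endif

    INTEGER, INTENT(IN)  ::  local_count   !< items on this rank
    INTEGER, INTENT(OUT) ::  total_count   !< items summed over all ranks
    INTEGER              ::  ierr          !< MPI error code

#if defined( __parallel )
    CALL MPI_ALLREDUCE( local_count, total_count, 1, MPI_INTEGER, MPI_SUM,    &
                        MPI_COMM_WORLD, ierr )
#else
!
!-- Serial fallback: the local value already is the global value
    total_count = local_count
#endif

END SUBROUTINE count_items_sketch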
palm/trunk/SOURCE/parin.f90
r2941 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2941 2018-04-03 11:54:58Z kanani
    ! Fix for spinup in case of restart run
    !
    ...
    !-- communicator.
    !-- First, set the default:
+   #if defined( __parallel )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, global_id, ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, global_procs, ierr )
+   #else
+   global_id    = 0
+   global_procs = 1
+   #endif
    IF ( maximum_parallel_io_streams == -1  .OR.  &
         maximum_parallel_io_streams > global_procs )  THEN
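Here the serial branch supplies neutral defaults (rank 0, one process), so the following logic for maximum_parallel_io_streams works unchanged in both builds. A minimal sketch of that fallback (illustration only, not PALM code; the routine and argument names are made up):

! Minimal sketch of serial default values for rank and process count
! (illustration only, not PALM code).
SUBROUTINE get_rank_and_size_sketch( my_rank, n_procs )

#if defined( __parallel )
    USE MPI
#endif

    IMPLICIT NONE

    INTEGER, INTENT(OUT) ::  my_rank   !< rank in MPI_COMM_WORLD (0 in serial runs)
    INTEGER, INTENT(OUT) ::  n_procs   !< number of processes (1 in serial runs)
    INTEGER              ::  ierr      !< MPI error code

#if defined( __parallel )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, my_rank, ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, n_procs, ierr )
#else
!
!-- Serial fallback: exactly one process with rank 0
    my_rank = 0
    n_procs = 1
#endif

END SUBROUTINE get_rank_and_size_sketch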
palm/trunk/SOURCE/pmc_interface_mod.f90
r2951 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2951 2018-04-06 09:05:08Z kanani
    ! Add log_point_s for pmci_model_configuration
    !
    ...
    INTEGER(iwp) ::  ncpl   !< number of nest domains

+   #if defined( __parallel )
    CALL location_message( 'setup the nested model configuration', .FALSE. )
    CALL cpu_log( log_point_s(79), 'pmci_model_config', 'start' )
    ...
    CALL cpu_log( log_point_s(79), 'pmci_model_config', 'stop' )
    CALL location_message( 'finished', .TRUE. )
+   #endif

 END SUBROUTINE pmci_modelconfiguration
    ...
 END SUBROUTINE pmci_set_array_pointer

+
 INTEGER FUNCTION get_number_of_childs ()
+
    IMPLICIT NONE

+   #if defined( __parallel )
    get_number_of_childs = SIZE( pmc_parent_for_child ) - 1
+   #else
+   get_number_of_childs = 0
+   #endif

    RETURN
+
 END FUNCTION get_number_of_childs

+
 INTEGER FUNCTION get_childid (id_index)
+
    IMPLICIT NONE

    INTEGER,INTENT(IN) ::  id_index

+   #if defined( __parallel )
    get_childid = pmc_parent_for_child(id_index)
+   #else
+   get_childid = 0
+   #endif

    RETURN
+
 END FUNCTION get_childid
+

 SUBROUTINE get_child_edges (m, lx_coord, lx_coord_b, rx_coord, rx_coord_b, &
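The stubs above let callers use the nesting query functions without guards of their own, because the serial variants simply return neutral values. A minimal sketch of that stub pattern (illustration only, not PALM code; the module name is made up and the mapping array merely stands in for pmc_parent_for_child):

! Minimal sketch of serial stub functions returning neutral values
! (illustration only, not PALM code).
MODULE nesting_queries_sketch

    IMPLICIT NONE

#if defined( __parallel )
    INTEGER, DIMENSION(0:2) ::  parent_for_child_sketch = (/ 0, 1, 1 /)   !< made-up parent/child mapping
#endif

 CONTAINS

    INTEGER FUNCTION get_number_of_childs_sketch()
#if defined( __parallel )
       get_number_of_childs_sketch = SIZE( parent_for_child_sketch ) - 1
#else
       get_number_of_childs_sketch = 0   ! a serial build never runs child domains
#endif
    END FUNCTION get_number_of_childs_sketch

END MODULE nesting_queries_sketch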
palm/trunk/SOURCE/pmc_particle_interface.f90
r2884 → r2967

    ! -----------------!
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2884 2018-03-14 08:33:20Z scharf
    ! B: corrected KIND of variable "parsize" for some MPI calls
    !
    ...
    USE, INTRINSIC ::  ISO_C_BINDING

-   #if !defined( __mpifh )
+   #if defined( __parallel )  &&  !defined( __mpifh )
    USE MPI
    #endif
    ...
    USE lpm_pack_and_sort_mod

+   USE lpm_exchange_horiz_mod, &
+       ONLY: realloc_particles_array
+
+   #if defined( __parallel )
    USE pmc_general, &
        ONLY: pedef
    ...
        ONLY: pmc_send_to_parent, pmc_recv_from_child

-   USE lpm_exchange_horiz_mod, &
-       ONLY: realloc_particles_array
+   #endif

    IMPLICIT NONE

-   #if defined( __mpifh )
+   #if defined( __parallel )  &&  defined( __mpifh )
    INCLUDE "mpif.h"
    #endif
    ...

    INTEGER(iwp) ::  nr_childs   !< Number of child models of the current model
+
+   #if defined( __parallel )

    nr_childs = get_number_of_childs()
    ...
    ENDIF

+   #endif
 END SUBROUTINE pmcp_g_init
 !------------------------------------------------------------------------------!
    ...

    INTEGER ::  parsize   !<
+   TYPE(C_PTR), SAVE ::  ptr   !<
+
+   TYPE(particle_type),DIMENSION(:),POINTER ::  win_buffer   !<
+
+   INTEGER(iwp),DIMENSION(1) ::  buf_shape   !<
+
+   #if defined( __parallel )
    INTEGER(KIND=MPI_ADDRESS_KIND) ::  parsize_mpi_address_kind   !<
    INTEGER(KIND=MPI_ADDRESS_KIND) ::  winsize   !<
-   TYPE(C_PTR), SAVE ::  ptr   !<
-
-   TYPE(particle_type),DIMENSION(:),POINTER ::  win_buffer   !<
-
-   INTEGER(iwp),DIMENSION(1) ::  buf_shape   !<

    !
    ...
    ENDIF

+   #endif
 END SUBROUTINE pmcp_g_alloc_win
+
+
 !------------------------------------------------------------------------------!
 ! Description:
    ...

    INTEGER ::  parsize   !<
+
+   #if defined( __parallel )
+   TYPE(pedef), POINTER ::  ape   !< TO_DO Klaus: give a description and better name of the variable
+
    INTEGER(KIND=MPI_ADDRESS_KIND) ::  parsize_mpi_address_kind   !<
    INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp   !<
-
-   TYPE(pedef), POINTER ::  ape   !< TO_DO Klaus: give a description and better name of the variable

    IF ( cpl_id > 1 ) THEN
    ...
    ENDIF

+   #endif
 END SUBROUTINE pmcp_c_get_particle_from_parent
+
+
 !------------------------------------------------------------------------------!
 ! Description:
    ...

    INTEGER ::  parsize   !<
-   INTEGER(KIND=MPI_ADDRESS_KIND) ::  parsize_mpi_address_kind   !<
-   INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp   !<

    REAL(wp) ::  eps=0.00001   !< used in calculations to avoid rounding errors
    REAL(wp) ::  xx   !< number of fine grid cells inside a coarse grid cell in x-direction
    REAL(wp) ::  yy   !< number of fine grid cells inside a coarse grid cell in y-direction

+   ! TYPE(particle_type) ::  dummy_part   !< dummy particle (needed for size calculations)
+
+   #if defined( __parallel )
    TYPE(pedef), POINTER ::  ape   !< TO_DO Klaus: give a description and better name of the variable
-
-   ! TYPE(particle_type) ::  dummy_part   !< dummy particle (needed for size calculations)
+
+   INTEGER(KIND=MPI_ADDRESS_KIND) ::  parsize_mpi_address_kind   !<
+   INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp   !<

    ...
    ENDIF

+   #endif
 END SUBROUTINE pmcp_c_send_particle_to_parent
+
+
 !------------------------------------------------------------------------------!
 ! Description:
    ...
    INTEGER(iwp),DIMENSION(1) ::  buf_shape   !<

+   #if defined( __parallel )
    TYPE(pedef), POINTER ::  ape   !< TO_DO Klaus: give a description and better name of the variable
    ...
    lfirst = .FALSE.

+   #endif
 END SUBROUTINE pmcp_p_fill_particle_win
+

 !------------------------------------------------------------------------------!
    ...
 !------------------------------------------------------------------------------!
 SUBROUTINE pmcp_p_empty_particle_win
+
    IMPLICIT NONE

    ...
    INTEGER(iwp),DIMENSION(1) ::  buf_shape   !<

+   #if defined( __parallel )
    DO m = 1, get_number_of_childs()
    ...
    ENDDO

+   #endif
 END SUBROUTINE pmcp_p_empty_particle_win
+

 !------------------------------------------------------------------------------!
 ! Description:
    ...
    REAL(wp) ::  z   !< particle position

+   #if defined( __parallel )
    DO m = 1, get_number_of_childs()
       CALL get_child_edges( m, lx_coord, lx_coord_b, rx_coord, rx_coord_b, &
    ...
    ENDDO

+   #endif
 END SUBROUTINE pmcp_p_delete_particles_in_fine_grid_area
+
+
 !------------------------------------------------------------------------------!
 ! Description:
    ...
    INTEGER(iwp),DIMENSION(2) ::  ivals   !< integer value to be send

+   #if defined( __parallel )
    child_nr_particles = 0
    IF ( myid == 0 ) THEN
    ...
    ENDIF

+   #endif
 END SUBROUTINE pmcp_g_print_number_of_particles
+

 !------------------------------------------------------------------------------!
    ...
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  tmp_particles_d   !<

+   #if defined( __parallel )
    with_copy_lo = .FALSE.
    IF ( PRESENT( with_copy ) )  with_copy_lo = with_copy
    ...
    ENDIF

+   #endif
 END SUBROUTINE check_and_alloc_coarse_particle
+

 !------------------------------------------------------------------------------!
    ...
    REAL(wp) ::  zc   !< child z coordinate

+   #if defined( __parallel )
    !
    !-- Child domain boundaries in the parent index space
    ...
    ENDDO

+   #endif
 END SUBROUTINE c_copy_particle_to_child_grid
+

 !------------------------------------------------------------------------------!
 ! Description:
    ...
    REAL(iwp) ::  z   !< z coordinate

+   #if defined( __parallel )
    !
    !-- Child domain boundaries in the parent index space
    ...
    CALL lpm_sort_in_subboxes

+   #endif
 END SUBROUTINE c_copy_particle_to_coarse_grid
+
+
 !------------------------------------------------------------------------------!
 ! Description:
    ...
    INTEGER(iwp),DIMENSION(1) ::  buf_shape   !<

+   #if defined( __parallel )
    buf_shape(1) = max_nr_particle_in_rma_win
    CALL C_F_POINTER( buf_ptr(m), particle_in_win , buf_shape )
    ...
    ENDDO
    ENDDO

+   #endif
 END SUBROUTINE p_copy_particle_to_org_grid
palm/trunk/SOURCE/radiation_model_mod.f90
r2964 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2964 2018-04-12 16:04:03Z Giersch
    ! Error message PA0491 has been introduced which could be previously found in
    ! check_open. The variable numprocs_previous_run is only known in case of
    ...
    ENDIF

+   #if defined( __parallel )
    CALL MPI_BARRIER( comm2d, ierr )
+   #endif
!   CALL radiation_write_debug_log( 'End of radiation_calc_svf (after mpi_barrier)' )

    ...
    INTEGER(iwp) ::  ip   !< number of processor where gridbox reside
    INTEGER(iwp) ::  ig   !< 1D index of gridbox in global 2D array
-   INTEGER(MPI_ADDRESS_KIND) ::  wdisp   !< RMA window displacement
    INTEGER(iwp) ::  wcount   !< RMA window item count
    INTEGER(iwp) ::  maxboxes   !< max no of CSF created
    ...
    REAL(wp), PARAMETER ::  grow_factor = 1.5_wp   !< factor of expansion of grow arrays

+   #if defined( __parallel )
+   INTEGER(MPI_ADDRESS_KIND) ::  wdisp   !< RMA window displacement
+   #endif

    yxorigin(:) = origin(2:3)
palm/trunk/SOURCE/synthetic_turbulence_generator_mod.f90
r2946 → r2967

    ! -----------------
    ! $Id$
+   ! bugfix: missing parallel cpp-directives added
+   !
+   ! 2946 2018-04-04 17:01:23Z suehring
    ! Remove unused module load
    !
    ...
    USE kinds

-   #if !defined( __mpifh )
+   #if defined( __parallel )  &&  !defined( __mpifh )
    USE MPI
    #endif
    ...
    IMPLICIT NONE

-   #if defined( __mpifh )
+   #if defined( __parallel )  &&  defined( __mpifh )
    INCLUDE "mpif.h"
    #endif