!> @file restart_data_mpi_io_mod.f90
!--------------------------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
! Public License as published by the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
! Public License for more details.
!
! You should have received a copy of the GNU General Public License along with PALM. If not, see
! <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2020 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------------------------!
!
! Current revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Id: restart_data_mpi_io_mod.f90 4500 2020-04-17 10:12:45Z eckhard $
! Fix too long lines
!
! 4498 2020-04-15 14:26:31Z raasch
! bugfix for creation of filetypes, argument removed from rd_mpi_io_open
!
! 4497 2020-04-15 10:20:51Z raasch
! last bugfix deactivated because of compile problems
!
! 4496 2020-04-15 08:37:26Z raasch
! problem with posix read arguments for surface data fixed
!
! 4495 2020-04-13 20:11:20Z raasch
! Initial version (K. Ketelsen), adjusted to PALM formatting standards (s. Raasch)
!
!
!
! Description:
! ------------
!> Routines for restart data handling using MPI-IO.
!--------------------------------------------------------------------------------------------------!
MODULE restart_data_mpi_io_mod

#if defined( __parallel )
#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif
#else
    USE posix_interface,                                                                           &
        ONLY:  posix_close, posix_lseek, posix_open, posix_read, posix_write
#endif

    USE, INTRINSIC ::  ISO_C_BINDING

    USE control_parameters,                                                                        &
        ONLY:  include_total_domain_boundaries

    USE exchange_horiz_mod,                                                                        &
        ONLY:  exchange_horiz, exchange_horiz_2d

    USE indices,                                                                                   &
        ONLY:  nbgp, nnx, nny, nx, nxl, nxlg, nxr, nxrg, ny, nyn, nyng, nys, nysg, nz, nzb, nzt

    USE kinds

    USE pegrid,                                                                                    &
        ONLY:  comm1dx, comm1dy, comm2d, myid, myidx, myidy, npex, npey, numprocs, pdims

    IMPLICIT NONE

    LOGICAL ::  all_pes_write                 !< all PEs have data to write
    LOGICAL ::  filetypes_created             !< MPI filetypes/subarrays have been created
    LOGICAL ::  print_header_now = .TRUE.     !< print file header after reading it
    LOGICAL ::  rd_flag                       !< file is opened for read
    LOGICAL ::  wr_flag                       !< file is opened for write

#if defined( __parallel )
    INTEGER(iwp)            ::  ierr                             !< error status of MPI-calls
    INTEGER(iwp), PARAMETER ::  rd_offset_kind = MPI_OFFSET_KIND !< Adress or Offset kind
    INTEGER(iwp), PARAMETER ::  rd_status_size = MPI_STATUS_SIZE !< size of MPI status array
#else
    INTEGER(iwp), PARAMETER ::  rd_offset_kind = C_SIZE_T        !< offset kind for posix I/O
    INTEGER(iwp), PARAMETER ::  rd_status_size = 1               !< Not required in sequential mode
#endif

    INTEGER(iwp) ::  debug_level = 1  !< TODO: replace with standard debug output steering

    INTEGER(iwp) ::  fh            !< MPI-IO file handle
    INTEGER(iwp) ::  ft_surf = -1  !< MPI filetype surface data
#if defined( __parallel )
    INTEGER(iwp) ::  ft_2di_nb     !< MPI filetype 2D array INTEGER no outer boundary
    INTEGER(iwp) ::  ft_2d         !< MPI filetype 2D array REAL with outer boundaries
    INTEGER(iwp) ::  ft_3d         !< MPI filetype 3D array REAL with outer boundaries
    INTEGER(iwp) ::  ft_3dsoil     !< MPI filetype for 3d-soil array
#endif
    INTEGER(iwp) ::  glo_start     !< global start index on this PE
    INTEGER(iwp) ::  nr_val        !< local number of values in x and y direction
    INTEGER(iwp) ::  total_number_of_surface_values  !< total number of values for one variable

    INTEGER(KIND=rd_offset_kind) ::  array_position   !< byte offset of next array in file
    INTEGER(KIND=rd_offset_kind) ::  header_position  !< current byte offset in file header

    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  m_end_index     !<
    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  m_start_index   !<
    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  m_global_start  !<

    REAL(KIND=wp) ::  mb_processed  !<

!
!-- Handling of outer boundaries
    TYPE local_boundaries
       INTEGER(iwp) ::  nnx
       INTEGER(iwp) ::  nny
       INTEGER(iwp) ::  nx
       INTEGER(iwp) ::  nxl
       INTEGER(iwp) ::  nxr
       INTEGER(iwp) ::  ny
       INTEGER(iwp) ::  nyn
       INTEGER(iwp) ::  nys
    END TYPE local_boundaries

    TYPE(local_boundaries) ::  lb  !< index bounds actually used for I/O (with or without outer boundaries)

!
!-- General Header (first 32 byte in restart file)
    TYPE general_header
       INTEGER(iwp) ::  nr_int         !< number of INTEGER entries in header
       INTEGER(iwp) ::  nr_char        !< number of Text strings entries in header
       INTEGER(iwp) ::  nr_real        !< number of REAL entries in header
       INTEGER(iwp) ::  nr_arrays      !< number of arrays in restart files
       INTEGER(iwp) ::  total_nx       !< total number of points in x-direction
       INTEGER(iwp) ::  total_ny       !< total number of points in y-direction
       INTEGER(iwp) ::  i_outer_bound  !< if 1, outer boundaries are stored in restart file
       INTEGER(iwp) ::  endian         !< little endian (1) or big endian (2) internal format
    END TYPE general_header

    TYPE(general_header), TARGET ::  tgh  !< header instance written to/read from the file

!
!-- Declaration of variables for file header section
    INTEGER(KIND=rd_offset_kind) ::  header_int_index            !< next free slot in INTEGER header section
    INTEGER, PARAMETER           ::  max_nr_int=256              !< capacity of INTEGER header section
    CHARACTER(LEN=32), DIMENSION(max_nr_int) ::  int_names       !< names of header INTEGER entries
    INTEGER(KIND=iwp), DIMENSION(max_nr_int) ::  int_values      !< values of header INTEGER entries

    INTEGER(KIND=rd_offset_kind) ::  header_char_index           !< next free slot in text section
    INTEGER, PARAMETER           ::  max_nr_text=128             !< capacity of text section
    CHARACTER(LEN=128), DIMENSION(max_nr_text) ::  text_lines    !< header text lines (name in cols 1-32, text from col 33)

    INTEGER(KIND=rd_offset_kind) ::  header_real_index           !< next free slot in REAL header section
    INTEGER, PARAMETER           ::  max_nr_real=256             !< capacity of REAL header section
    CHARACTER(LEN=32), DIMENSION(max_nr_real) ::  real_names     !< names of header REAL entries
    REAL(KIND=wp), DIMENSION(max_nr_real)     ::  real_values    !< values of header REAL entries

    INTEGER(KIND=rd_offset_kind) ::  header_arr_index            !< next free slot in array-metadata section
    INTEGER, PARAMETER           ::  max_nr_arrays=600           !< capacity of array-metadata section
    CHARACTER(LEN=32), DIMENSION(max_nr_arrays) ::  array_names  !< names of 2d/3d arrays in file
    INTEGER(KIND=rd_offset_kind), DIMENSION(max_nr_arrays) ::  array_offset  !< byte offset of each array

    SAVE

    PRIVATE

    PUBLIC  mb_processed, total_number_of_surface_values

!
!-- PALM interfaces
    INTERFACE rd_mpi_io_check_array
       MODULE PROCEDURE rd_mpi_io_check_array
    END INTERFACE rd_mpi_io_check_array

    INTERFACE rd_mpi_io_close
       MODULE PROCEDURE rd_mpi_io_close
    END INTERFACE rd_mpi_io_close

    INTERFACE rd_mpi_io_open
       MODULE PROCEDURE rd_mpi_io_open
    END INTERFACE rd_mpi_io_open

    INTERFACE rrd_mpi_io
       MODULE PROCEDURE rrd_mpi_io_char
       MODULE PROCEDURE rrd_mpi_io_int
       MODULE PROCEDURE rrd_mpi_io_int_2d
       MODULE PROCEDURE rrd_mpi_io_logical
       MODULE PROCEDURE rrd_mpi_io_real
       MODULE PROCEDURE rrd_mpi_io_real_2d
       MODULE PROCEDURE rrd_mpi_io_real_3d
       MODULE PROCEDURE rrd_mpi_io_real_3d_soil
    END INTERFACE rrd_mpi_io

    INTERFACE rrd_mpi_io_global_array
       MODULE PROCEDURE rrd_mpi_io_global_array_int_1d
       MODULE PROCEDURE rrd_mpi_io_global_array_real_1d
       MODULE PROCEDURE rrd_mpi_io_global_array_real_2d
       MODULE PROCEDURE rrd_mpi_io_global_array_real_3d
       MODULE PROCEDURE rrd_mpi_io_global_array_real_4d
    END INTERFACE rrd_mpi_io_global_array

    INTERFACE rrd_mpi_io_surface
       MODULE PROCEDURE rrd_mpi_io_surface
       MODULE PROCEDURE rrd_mpi_io_surface_2d
    END INTERFACE rrd_mpi_io_surface

    INTERFACE rd_mpi_io_surface_filetypes
       MODULE PROCEDURE rd_mpi_io_surface_filetypes
    END INTERFACE rd_mpi_io_surface_filetypes

    INTERFACE wrd_mpi_io
       MODULE PROCEDURE wrd_mpi_io_char
       MODULE PROCEDURE wrd_mpi_io_int
       MODULE PROCEDURE wrd_mpi_io_int_2d
       MODULE PROCEDURE wrd_mpi_io_logical
       MODULE PROCEDURE wrd_mpi_io_real
       MODULE PROCEDURE wrd_mpi_io_real_2d
       MODULE PROCEDURE wrd_mpi_io_real_3d
       MODULE PROCEDURE wrd_mpi_io_real_3d_soil
    END INTERFACE wrd_mpi_io

    INTERFACE wrd_mpi_io_global_array
       MODULE PROCEDURE wrd_mpi_io_global_array_int_1d
       MODULE PROCEDURE wrd_mpi_io_global_array_real_1d
       MODULE PROCEDURE wrd_mpi_io_global_array_real_2d
       MODULE PROCEDURE wrd_mpi_io_global_array_real_3d
       MODULE PROCEDURE wrd_mpi_io_global_array_real_4d
    END INTERFACE wrd_mpi_io_global_array

    INTERFACE wrd_mpi_io_surface
       MODULE PROCEDURE wrd_mpi_io_surface
       MODULE PROCEDURE wrd_mpi_io_surface_2d
    END INTERFACE wrd_mpi_io_surface

    PUBLIC rd_mpi_io_check_array, rd_mpi_io_close, rd_mpi_io_open, rrd_mpi_io,                     &
           rrd_mpi_io_global_array, rrd_mpi_io_surface, rd_mpi_io_surface_filetypes, wrd_mpi_io,   &
           wrd_mpi_io_global_array, wrd_mpi_io_surface


 CONTAINS

!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Open restart file for read or write with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rd_mpi_io_open( action, file_name )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN) ::  action     !< 'READ'/'read' or 'WRITE'/'write'
    CHARACTER(LEN=*), INTENT(IN) ::  file_name  !< name of the restart file

    INTEGER(iwp)                 ::  i          !< loop index
    INTEGER(iwp)                 ::  gh_size    !< size of the general header in byte

    INTEGER(KIND=rd_offset_kind) ::  offset     !< file offset for default view

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status
#endif

#if ! defined( __parallel )
    TYPE(C_PTR) ::  buf_ptr  !< C pointer to header for posix read
#endif


    offset = 0

    rd_flag = ( TRIM( action ) == 'READ'  .OR. TRIM( action ) == 'read'  )
    wr_flag = ( TRIM( action ) == 'WRITE' .OR. TRIM( action ) == 'write' )

!
!-- Create subarrays and file types
    filetypes_created = .FALSE.

!
!-- In case of read it is not known yet if data include total domain. Filetypes will be created
!-- further below.
    IF ( wr_flag )  THEN
       CALL rs_mpi_io_create_filetypes
       filetypes_created = .TRUE.
    ENDIF

!
!-- Open file for MPI-IO
#if defined( __parallel )
    IF ( rd_flag )  THEN
       CALL MPI_FILE_OPEN( comm2d, TRIM( file_name ), MPI_MODE_RDONLY, MPI_INFO_NULL, fh, ierr )
       WRITE (9,*) 'Open MPI-IO restart file for read  ==> ', TRIM( file_name )
    ELSEIF ( wr_flag )  THEN
       CALL MPI_FILE_OPEN( comm2d, TRIM( file_name ), MPI_MODE_CREATE+MPI_MODE_WRONLY,             &
                           MPI_INFO_NULL, fh, ierr )
       WRITE (9,*) 'Open MPI-IO restart file for write ==> ', TRIM( file_name )
    ELSE
       CALL rs_mpi_io_error( 1 )
    ENDIF
#else
    IF ( rd_flag )  THEN
       fh = posix_open( TRIM( file_name ), .TRUE. )
       WRITE (9,*) 'Open sequential restart file for read  ==> ', TRIM( file_name ), ' ', fh
    ELSEIF ( wr_flag )  THEN
       fh = posix_open( TRIM( file_name ), .FALSE. )
       WRITE (9,*) 'Open sequential restart file for write ==> ', TRIM( file_name ), ' ', fh
    ELSE
       CALL rs_mpi_io_error( 1 )
    ENDIF

    IF ( fh < 0 )  CALL rs_mpi_io_error( 6 )
#endif

    array_position  = 65536          !> Start offset for writing 2-D and 3-D arrays at 64 k
    header_position = 0

    header_int_index  = 1
    header_char_index = 1
    header_real_index = 1
    header_arr_index  = 1

    int_names    = ' '
    int_values   = 0
    text_lines   = ' '
    real_names   = ' '
    real_values  = 0.0
    array_names  = ' '
    array_offset = 0

    int_names(1)  = 'nx'
    int_values(1) = nx
    int_names(2)  = 'ny'
    int_values(2) = ny
    int_names(3)  = 'nz'
    int_values(3) = nz
    header_int_index = header_int_index+3

    DO  i = 1, max_nr_arrays
       array_offset(i) = 0
       array_names(i)  = ' '
    ENDDO

    gh_size = STORAGE_SIZE( tgh ) / 8

    IF ( rd_flag )  THEN
!
!--    File is open for read.
#if defined( __parallel )
!--    Set the default view
       CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
!
!--    Read the file header size
       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, tgh, gh_size, MPI_BYTE, status, ierr )
#else
       CALL posix_lseek( fh, header_position )
       buf_ptr = C_LOC( tgh )
       CALL posix_read( fh, buf_ptr, gh_size )
#endif
       header_position = header_position + gh_size

       include_total_domain_boundaries = ( tgh%i_outer_bound == 1 )

!
!--    File types depend on if boundaries of the total domain is included in data. This has been
!--    checked with the previous statement.
       CALL rs_mpi_io_create_filetypes
       filetypes_created = .TRUE.

#if defined( __parallel )
!
!--    Read INTEGER values
       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, int_names, SIZE( int_names ) * 32, MPI_CHAR, status, ierr )
       header_position = header_position + SIZE( int_names ) * 32

       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ (fh, int_values, SIZE( int_values ), MPI_INT, status, ierr )
       header_position = header_position + SIZE( int_values ) * iwp

!
!--    Character entries
       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, text_lines, SIZE( text_lines ) * 128, MPI_CHAR, status, ierr )
       header_position = header_position + SIZE( text_lines ) * 128

!
!--    REAL values
       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, real_names, SIZE( real_names ) * 32, MPI_CHAR, status, ierr )
       header_position = header_position + SIZE( real_names ) * 32

       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, real_values, SIZE( real_values ), MPI_REAL, status, ierr )
       header_position = header_position + SIZE( real_values ) * wp

!
!--    2d- and 3d-array headers
       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, array_names, SIZE( array_names ) * 32, MPI_CHAR, status, ierr )
       header_position = header_position + SIZE( array_names ) * 32

       CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
       CALL MPI_FILE_READ( fh, array_offset, SIZE( array_offset ) * MPI_OFFSET_KIND, MPI_BYTE,     &
                           status,ierr )   ! there is no I*8 datatype in Fortran
       header_position = header_position + SIZE( array_offset ) * rd_offset_kind
#else
       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, int_names )
       header_position = header_position + SIZE( int_names ) * 32

       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, int_values, SIZE( int_values ) )
       header_position = header_position + SIZE( int_values ) * iwp

!
!--    Character entries
       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, text_lines )
       header_position = header_position + SIZE( text_lines ) * 128

!
!--    REAL values
       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, real_names )
       header_position = header_position + SIZE( real_names ) * 32

       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, real_values, SIZE( real_values ) )
       header_position = header_position + SIZE( real_values ) * wp

!
!--    2d- and 3d-array headers
       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, array_names )
       header_position = header_position + SIZE( array_names ) * 32

       CALL posix_lseek( fh, header_position )
       CALL posix_read( fh, array_offset, SIZE( array_offset ) )  ! there is no I*8 datatype in Fortran
       header_position = header_position + SIZE( array_offset ) * rd_offset_kind
#endif

       IF ( debug_level >= 2 )  THEN
          WRITE (9,*) 'header position after array metadata ', header_position
       ENDIF

       IF ( print_header_now )  CALL rs_mpi_io_print_header

    ENDIF

 END SUBROUTINE rd_mpi_io_open


!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Check, if array exists in restart file
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rd_mpi_io_check_array( name, found )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN) ::  name   !< name of the array to look up

    INTEGER(iwp) ::  i                      !< loop index over array metadata

    LOGICAl      ::  found                  !< .TRUE. if name is in the array header section

!
!-- Linear search through the array-metadata section of the file header. On success the module
!-- variable array_position is set so that a subsequent rrd_mpi_io call reads this array.
    DO  i = 1, tgh%nr_arrays
       IF ( TRIM( array_names(i) ) == TRIM( name ) )  THEN
          array_position = array_offset(i)
          found = .TRUE.
          RETURN
       ENDIF
    ENDDO

    found = .FALSE.

 END SUBROUTINE rd_mpi_io_check_array



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read INTEGER with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_int( name, value, found )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)   ::  name      !< name of the INTEGER header entry

    INTEGER(iwp)                   ::  i         !< loop index
    INTEGER(KIND=iwp), INTENT(OUT) ::  value     !< value read from the header (0 if not found)

    LOGICAL                        ::  lo_found  !< local found flag
    LOGICAL, INTENT(OUT), OPTIONAL ::  found     !< if present, report lookup result instead of aborting


    lo_found = .FALSE.
    value = 0

!
!-- Search the INTEGER section of the file header (read earlier in rd_mpi_io_open)
    DO  i = 1, tgh%nr_int
       IF ( TRIM(int_names(i)) == TRIM( name ) )  THEN
          IF ( debug_level >= 2 )  WRITE(9,*) 'INTEGER variable found ', name
          value = int_values(i)
          lo_found = .TRUE.
          EXIT
       ENDIF
    ENDDO

!
!-- With the optional argument present the caller handles a missing entry itself
    IF ( PRESENT( found ) )  THEN
       found = lo_found
       RETURN
    ENDIF

!
!-- Without the optional argument a missing entry is a fatal error
    IF ( .NOT. lo_found )  THEN
       WRITE(9,*)  'INTEGER not found ', name
       CALL rs_mpi_io_error( 3 )
    ENDIF

 END SUBROUTINE rrd_mpi_io_int



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read REAL with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_real( name, value, found )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)   ::  name      !< name of the REAL header entry

    INTEGER(iwp)                   ::  i         !< loop index

    LOGICAL                        ::  lo_found  !< local found flag
    LOGICAL, INTENT(OUT), OPTIONAL ::  found     !< if present, report lookup result instead of aborting

    REAL(KIND=wp), INTENT(OUT)     ::  value     !< value read from the header (0.0 if not found)


    lo_found = .FALSE.
    value = 0.0

!
!-- Search the REAL section of the file header (read earlier in rd_mpi_io_open)
    DO  i = 1, tgh%nr_real
       IF ( TRIM(real_names(i)) == TRIM( name ) )  THEN
          IF ( debug_level >= 2 )  WRITE(9,*) 'REAL variable found ', name
          value = real_values(i)
          lo_found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( PRESENT( found ) )  THEN
       found = lo_found
       RETURN
    ENDIF

    IF ( .NOT. lo_found )  THEN
       WRITE(9,*) 'REAL value not found ', name
       CALL rs_mpi_io_error(3)
    ENDIF

 END SUBROUTINE rrd_mpi_io_real



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-real array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_real_2d( name, data )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name of the array in the file

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status
#endif

    INTEGER(iwp)                       ::  i       !< loop index

    LOGICAL                            ::  found   !< array found in file header

    REAL(wp), INTENT(INOUT), DIMENSION(nysg:nyng,nxlg:nxrg) ::  data  !< result array incl. ghost layers

!
!-- I/O buffer with transposed index order (x first, y second) as stored in the file;
!-- lb holds the bounds actually present in the file (with or without outer boundaries)
    REAL(KIND=wp), DIMENSION(lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_2d


    found = .FALSE.

    DO  i = 1, tgh%nr_arrays
       IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
          array_position = array_offset(i)
          found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( found )  THEN
#if defined( __parallel )
       CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_2d, 'native', MPI_INFO_NULL, ierr )
       CALL MPI_FILE_READ_ALL( fh, array_2d, SIZE( array_2d), MPI_REAL, status, ierr )
#else
       CALL posix_lseek( fh, array_position )
       CALL posix_read( fh, array_2d, SIZE( array_2d ) )
#endif

       IF ( include_total_domain_boundaries)  THEN
!
!--       File contains the outer boundaries; transpose back to PALM order (y,x) and shift
!--       indices so that the boundary values end up in the ghost layers
          DO  i = lb%nxl, lb%nxr
             data(lb%nys-nbgp:lb%nyn-nbgp,i-nbgp) = array_2d(i,lb%nys:lb%nyn)
          ENDDO
          IF ( debug_level >= 2)  WRITE(9,*) 'r2f_ob ', TRIM(name),' ', SUM( data(nys:nyn,nxl:nxr) )
       ELSE
!
!--       File contains inner subdomain only; transpose back to PALM order (y,x)
          DO  i = nxl, nxr
             data(nys:nyn,i) = array_2d(i,nys:nyn)
          ENDDO
          IF ( debug_level >= 2) WRITE(9,*) 'r2f ', TRIM( name ),' ', SUM( data)
       ENDIF

!
!--    Fill the ghost layers from neighbouring PEs
       CALL exchange_horiz_2d( data )

    ELSE
       WRITE(9,*) 'array_2D not found ', name
       CALL rs_mpi_io_error( 2 )
    ENDIF

 END SUBROUTINE rrd_mpi_io_real_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-INTEGER array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_int_2d( name, data )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)        ::  name      !< name of the array in the file

    INTEGER(iwp)                        ::  i         !< loop index
    INTEGER(iwp)                        ::  j         !< loop index

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size)  ::  status    !< MPI status
#endif

    INTEGER, DIMENSION(nxl:nxr,nys:nyn) ::  array_2d  !< I/O buffer, transposed (x,y) order

    INTEGER(KIND=iwp), INTENT(INOUT), DIMENSION(:,:) ::  data  !< result array, assumed shape (see size checks below)

    LOGICAL                             ::  found     !< array found in file header


    found = .FALSE.

    DO  i = 1, tgh%nr_arrays
       IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
          array_position = array_offset(i)
          found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( found )  THEN

       IF ( ( nxr - nxl + 1 + 2*nbgp ) == SIZE( data, 2 ) )  THEN
!
!--       Output array with Halos.
!--       ATTENTION: INTEGER array with ghost boundaries are not implemented yet. This kind of array
!--       would be dimensioned in the caller subroutine like this:
!--       INTEGER, DIMENSION(nysg:nyng,nxlg:nxrg)::  data
          CALL rs_mpi_io_error( 2 )

       ELSEIF ( (nxr-nxl+1) == SIZE( data, 2 ) )  THEN
!
!--       INTEGER input array without Halos.
!--       This kind of array is dimensioned in the caller subroutine
!--       INTEGER, DIMENSION(nys:nyn,nxl:nxr) ::  data
#if defined( __parallel )
          CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_INTEGER, ft_2di_nb, 'native',            &
                                  MPI_INFO_NULL, ierr )
          CALL MPI_FILE_READ_ALL( fh, array_2d, SIZE( array_2d), MPI_INTEGER, status, ierr )
#else
          CALL posix_lseek( fh, array_position )
          CALL posix_read( fh, array_2d, SIZE( array_2d ) )
#endif
!
!--       Transpose (x,y) file order back to (y,x) PALM order, mapping to 1-based indices of
!--       the assumed-shape dummy
          DO  j = nys, nyn
             DO  i = nxl, nxr
                data(j-nys+1,i-nxl+1) = array_2d(i,j)
             ENDDO
          ENDDO

          IF ( debug_level >= 2 )  WRITE(9,*) 'r2i ', TRIM( name ),' ', SUM( array_2d )

       ELSE
          WRITE (9,*) '### rrd_mpi_io_int_2d  array: ', TRIM( name )
          CALL rs_mpi_io_error( 4 )
       ENDIF

    ELSE

       WRITE(9,*) 'array_2D not found ', name
       CALL rs_mpi_io_error( 2 )

    ENDIF

 END SUBROUTINE rrd_mpi_io_int_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-REAL array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_real_3d( name, data )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name of the array in the file

    INTEGER(iwp)                       ::  i       !< loop index

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status
#endif

    LOGICAL                            ::  found   !< array found in file header

!
!-- I/O buffer with transposed horizontal index order (z,x,y) as stored in the file
    REAL(KIND=wp), DIMENSION(nzb:nzt+1,lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_3d

    REAL(wp), INTENT(INOUT), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg) ::  data  !< result array incl. ghost layers


    found = .FALSE.

    DO  i = 1, tgh%nr_arrays
       IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
          array_position = array_offset(i)
          found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( found )  THEN
#if defined( __parallel )
       CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_3d, 'native', MPI_INFO_NULL, ierr )
       CALL MPI_FILE_READ_ALL( fh, array_3d, SIZE( array_3d ), MPI_REAL, status, ierr )
#else
       CALL posix_lseek( fh, array_position )
       CALL posix_read(fh, array_3d, SIZE( array_3d ) )
#endif
       IF ( include_total_domain_boundaries)  THEN
!
!--       File contains the outer boundaries; transpose back and shift so that boundary values
!--       end up in the ghost layers
          DO  i = lb%nxl, lb%nxr
             data(:,lb%nys-nbgp:lb%nyn-nbgp,i-nbgp) = array_3d(:,i,lb%nys:lb%nyn)
          ENDDO
          IF ( debug_level >= 2 )  WRITE(9,*) 'r3f_ob ', TRIM( name ),' ', SUM( data(:,nys:nyn,nxl:nxr) )
       ELSE
          DO  i = nxl, nxr
             data(:,nys:nyn,i) = array_3d(:,i,nys:nyn)
          ENDDO
          IF ( debug_level >= 2 )  WRITE(9,*) 'r3f ', TRIM( name ),' ', SUM( data )
       ENDIF

!
!--    Fill the ghost layers from neighbouring PEs
       CALL exchange_horiz( data, nbgp )

    ELSE
       WRITE(9,*)  'array_3D not found ', name
       CALL rs_mpi_io_error(2)
    ENDIF

 END SUBROUTINE rrd_mpi_io_real_3d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 3d-REAL soil array with MPI-IO
!> nzb_soil, nzt_soil are located in the module land_surface_model_mod. Since Fortran does not allow
!> cross referencing of module variables, it is required to pass these variables as arguments.
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_real_3d_soil( name, data, nzb_soil, nzt_soil )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name      !< name of the array in the file

    INTEGER(iwp)                       ::  i         !< loop index
    INTEGER, INTENT(IN)                ::  nzb_soil  !< lower bound of soil levels
    INTEGER, INTENT(IN)                ::  nzt_soil  !< upper bound of soil levels

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status    !< MPI status
#endif

    LOGICAL                            ::  found     !< array found in file header

!
!-- I/O buffer with transposed horizontal index order (z,x,y) as stored in the file
    REAL(KIND=wp), DIMENSION(nzb_soil:nzt_soil,lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_3d

    REAL(wp), INTENT(INOUT), DIMENSION(nzb_soil:nzt_soil,nysg:nyng,nxlg:nxrg) ::  data  !< result array incl. ghost layers


    found = .FALSE.

    DO  i = 1, tgh%nr_arrays
       IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
          array_position = array_offset(i)
          found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( found )  THEN
#if defined( __parallel )
!
!--    The soil filetype depends on the number of soil levels and is therefore created and
!--    freed for each read
       CALL rd_mpi_io_create_filetypes_3dsoil( nzb_soil, nzt_soil )
       CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_3dsoil, 'native', MPI_INFO_NULL,   &
                               ierr )
       CALL MPI_FILE_READ_ALL( fh, array_3d, SIZE( array_3d ), MPI_REAL, status, ierr )
       CALL MPI_TYPE_FREE( ft_3dsoil, ierr )
#else
       CALL posix_lseek( fh, array_position )
       CALL posix_read( fh, array_3d, SIZE( array_3d ) )
#endif
       IF ( include_total_domain_boundaries )  THEN
          DO  i = lb%nxl, lb%nxr
             data(:,lb%nys-nbgp:lb%nyn-nbgp,i-nbgp) = array_3d(:,i,lb%nys:lb%nyn)
          ENDDO
          IF ( debug_level >= 2 )  WRITE(9,*) 'r3f_ob_soil ', TRIM( name ),' ', SUM( data(:,nys:nyn,nxl:nxr) )
       ELSE
          DO  i = nxl, nxr
             data(:,nys:nyn,i) = array_3d(:,i,nys:nyn)
          ENDDO
          IF ( debug_level >= 2 )  WRITE(9,*) 'r3f_soil ', TRIM( name ),' ', SUM( array_3d )
       ENDIF

    ELSE
       WRITE(9,*)  'array_3D not found ', name
       CALL rs_mpi_io_error( 2 )
    ENDIF

 END SUBROUTINE rrd_mpi_io_real_3d_soil



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read CHARACTER with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_char( name, text, found )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)   ::  name      !< name of the text entry (matched against cols 1-32)
    CHARACTER(LEN=*), INTENT(OUT)  ::  text      !< text read from the header (blank if not found)
    CHARACTER(LEN=128)             ::  lo_line   !< one header text line (name + text)

    INTEGER(iwp)                   ::  i         !< loop index

    LOGICAL, INTENT(OUT), OPTIONAL ::  found     !< if present, report lookup result instead of aborting
    LOGICAL                        ::  lo_found  !< local found flag


    lo_found = .FALSE.
    text = ' '

!
!-- Each text line stores the name in columns 1-32 and the text from column 33 onward
    DO  i = 1, tgh%nr_char
       lo_line = text_lines(i)
       IF ( lo_line(1:32) == name )  THEN
          IF ( debug_level >= 2 )  WRITE(9,*)  'Character variable found ==> ', lo_line(1:32)
          text = lo_line(33:)
          lo_found = .TRUE.
          EXIT
       ENDIF
    ENDDO

    IF ( PRESENT( found ) )  THEN
       found = lo_found
       RETURN
    ENDIF

    IF ( .NOT. lo_found )  THEN
       WRITE(9,*)  'Character variable not found ', name
       CALL rs_mpi_io_error( 3 )
    ENDIF

 END SUBROUTINE rrd_mpi_io_char



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read LOGICAL with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rrd_mpi_io_logical( name, value )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN) ::  name                !< name of the entry in the INTEGER header section

    INTEGER(iwp)                 ::  logical_as_integer  !< logicals are stored as INTEGER (1 = .TRUE.)

    LOGICAL, INTENT(OUT)         ::  value               !< value read from the header


    CALL rrd_mpi_io_int( name, logical_as_integer )
    value = ( logical_as_integer == 1 )

 END SUBROUTINE rrd_mpi_io_logical



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write INTEGER with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_int( name, value )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)  ::  name   !< name of the header entry
    INTEGER(KIND=iwp), INTENT(IN) ::  value  !< value to store

!
!-- Only buffered in the header arrays here; the header is written to file on close
    int_names(header_int_index)  = name
    int_values(header_int_index) = value
    header_int_index = header_int_index + 1

 END SUBROUTINE wrd_mpi_io_int



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write REAL with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_real( name, value )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN) ::  name   !< name of the header entry
    REAL(wp), INTENT(IN)         ::  value  !< value to store

!
!-- Only buffered in the header arrays here; the header is written to file on close
    real_names(header_real_index)  = name
    real_values(header_real_index) = value
    header_real_index = header_real_index + 1

 END SUBROUTINE wrd_mpi_io_real



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 2d-REAL array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_real_2d( name, data )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name under which the array is stored

    INTEGER(iwp)                       ::  i       !< loop index

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status
#endif

!
!-- I/O buffer with transposed index order (x first, y second); lb holds the bounds actually
!-- written (with or without outer boundaries)
    REAL(KIND=wp), DIMENSION(lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_2d

    REAL(wp), INTENT(IN), DIMENSION(nysg:nyng,nxlg:nxrg)  ::  data  !< array to write incl. ghost layers

!
!-- Register the array in the file-header metadata (written to file on close)
    array_names(header_arr_index)  = name
    array_offset(header_arr_index) = array_position
    header_arr_index = header_arr_index + 1

    IF ( include_total_domain_boundaries )  THEN
!
!--    Prepare Output with outer boundaries: shift indices so that the ghost layers holding the
!--    outer boundary values are included, and transpose to (x,y) file order
       DO  i = lb%nxl, lb%nxr
          array_2d(i,lb%nys:lb%nyn) = data(lb%nys-nbgp:lb%nyn-nbgp,i-nbgp)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w2f_ob ', TRIM( name ), ' ',  SUM( data(nys:nyn,nxl:nxr) )
    ELSE
!
!--    Prepare Output without outer boundaries: inner subdomain only, transposed to (x,y)
       DO  i = nxl,nxr
          array_2d(i,lb%nys:lb%nyn) = data(nys:nyn,i)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w2f ', TRIM( name ),' ',  SUM( array_2d )
    ENDIF

#if defined( __parallel )
    CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_2d, 'native', MPI_INFO_NULL, ierr )
    CALL MPI_FILE_WRITE_ALL( fh, array_2d, SIZE( array_2d), MPI_REAL, status, ierr )
#else
    CALL posix_lseek( fh, array_position )
    CALL posix_write( fh, array_2d, SIZE( array_2d ) )
#endif
!
!-- Type conversion required, otherwise rigth hand side brackets are calculated assuming 4 byte INT.
!-- Maybe a compiler problem.
    array_position = array_position + ( INT( lb%ny, KIND=rd_offset_kind ) + 1 ) *                  &
                                      ( INT( lb%nx, KIND=rd_offset_kind ) + 1 ) * wp

 END SUBROUTINE wrd_mpi_io_real_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 2d-INTEGER array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_int_2d( name, data, ar_found )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)        ::  name      !< name under which the array is stored

    INTEGER(iwp)                        ::  i         !< loop index
    INTEGER(iwp)                        ::  j         !< loop index

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size)  ::  status    !< MPI status
#endif
    INTEGER(KIND=iwp), INTENT(IN), DIMENSION(:,:) ::  data  !< array to write, assumed shape (see size checks below)
    INTEGER, DIMENSION(nxl:nxr,nys:nyn) ::  array_2d  !< I/O buffer, transposed (x,y) order

    LOGICAl, OPTIONAL                   ::  ar_found  !< always set .TRUE. on return when present

!
!-- Register the array in the file-header metadata (written to file on close)
    array_names(header_arr_index)  = name
    array_offset(header_arr_index) = array_position
    header_arr_index = header_arr_index + 1

    IF ( ( nxr-nxl + 1 + 2 * nbgp ) == SIZE( data, 2 ) )  THEN
!
!--    Integer arrays with ghost layers are not implemented yet. These kind of arrays would be
!--    dimensioned in the caller subroutine as
!--    INTEGER, DIMENSION(nysg:nyng,nxlg:nxrg) ::  data
       WRITE (9,*) '### wrd_mpi_io_int_2d  IF  array: ', TRIM( name )
       CALL rs_mpi_io_error( 4 )

    ELSEIF ( ( nxr-nxl+1 ) == SIZE( data, 2 ) )  THEN
!
!--    INTEGER input array without ghost layers.
!--    This kind of array is dimensioned in the caller subroutine as
!--    INTEGER, DIMENSION(nys:nyn,nxl:nxr) ::  data
!--    Transpose 1-based (y,x) caller order to (x,y) file order
       DO  j = nys, nyn
          DO  i = nxl, nxr
             array_2d(i,j) = data(j-nys+1,i-nxl+1)
          ENDDO
       ENDDO

       IF ( debug_level >= 2 )  WRITE(9,*)  'w2i ', TRIM( name ), ' ', SUM( array_2d )
#if defined( __parallel )
       CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_INTEGER, ft_2di_nb, 'native', MPI_INFO_NULL,&
                               ierr )
       CALL MPI_FILE_WRITE_ALL( fh, array_2d, SIZE( array_2d ), MPI_INTEGER, status, ierr )
#else
       CALL posix_lseek( fh, array_position )
       CALL posix_write( fh, array_2d, SIZE( array_2d ) )
#endif
!
!--    Type conversion required, otherwise rigth hand side brackets are calculated assuming 4 byte
!--    INT. Maybe a compiler problem.
       array_position = array_position + INT( (ny+1), KIND=rd_offset_kind ) *                      &
                                         INT( (nx+1), KIND=rd_offset_kind ) * 4

    ELSE
       WRITE (9,*) '### wrd_mpi_io_int_2d  array: ', TRIM( name )
       CALL rs_mpi_io_error( 4 )
    ENDIF

    IF ( PRESENT( ar_found ) )  ar_found = .TRUE.

 END SUBROUTINE wrd_mpi_io_int_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 3d-REAL array with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_real_3d( name, data )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name under which the array is stored

    INTEGER(iwp)                       ::  i       !< loop index
#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status
#endif
    REAL(wp), INTENT(IN), DIMENSION(nzb:nzt+1,nysg:nyng,nxlg:nxrg)  ::  data  !< array to write incl. ghost layers

!
!-- I/O buffer with transposed horizontal index order (z,x,y)
    REAL(KIND=wp), DIMENSION(nzb:nzt+1,lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_3d

!
!-- Register the array in the file-header metadata (written to file on close)
    array_names(header_arr_index)  = name
    array_offset(header_arr_index) = array_position
    header_arr_index = header_arr_index + 1

    IF ( include_total_domain_boundaries )  THEN
!
!--    Prepare output of 3d-REAL-array with ghost layers.
!--    In the virtual PE grid, the first dimension is PEs along x, and the second along y.
!--    For MPI-IO it is recommended to have the index order of the array in the same way, i.e.
!--    the first dimension should be along x and the second along y.
!--    For this reason, the original PALM data need to be swapped.
       DO  i = lb%nxl, lb%nxr
          array_3d(:,i,lb%nys:lb%nyn) = data(:,lb%nys-nbgp:lb%nyn-nbgp,i-nbgp)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w3f_ob ', TRIM( name ),' ', SUM( data(:,nys:nyn,nxl:nxr) )
    ELSE
!
!--    Prepare output of 3d-REAL-array without ghost layers
       DO  i = nxl, nxr
          array_3d(:,i,lb%nys:lb%nyn) = data(:,nys:nyn,i)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w3f ', TRIM( name ),' ', SUM( array_3d )
    ENDIF

#if defined( __parallel )
    CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_3d, 'native', MPI_INFO_NULL, ierr )
    CALL MPI_FILE_WRITE_ALL( fh, array_3d, SIZE( array_3d ), MPI_REAL, status, ierr )
#else
    CALL posix_lseek( fh, array_position )
    CALL posix_write( fh, array_3d, SIZE( array_3d ) )
#endif
!
!-- Type conversion required, otherwise rigth hand side brackets are calculated assuming 4 byte INT.
!-- Maybe a compiler problem.
    array_position = array_position + INT( (nz+2), KIND=rd_offset_kind ) *                         &
                                      INT( (lb%ny+1), KIND=rd_offset_kind ) *                      &
                                      INT( (lb%nx+1), KIND=rd_offset_kind ) * wp

 END SUBROUTINE wrd_mpi_io_real_3d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 3d-REAL soil array with MPI-IO.
!> nzb_soil, nzt_soil are located in the module land_surface_model_mod. Since Fortran does not allow
!> cross referencing of module variables, it is required to pass these variables as arguments.
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_real_3d_soil( name, data, nzb_soil, nzt_soil )

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(IN)       ::  name      !< name under which the array is stored

    INTEGER(iwp)                       ::  i         !< loop index
    INTEGER, INTENT(IN)                ::  nzb_soil  !< lower bound of soil levels
    INTEGER, INTENT(IN)                ::  nzt_soil  !< upper bound of soil levels

#if defined( __parallel )
    INTEGER, DIMENSION(rd_status_size) ::  status    !< MPI status
#endif

    REAL(wp), INTENT(IN), DIMENSION(nzb_soil:nzt_soil,nysg:nyng,nxlg:nxrg)  ::  data  !< array to write incl. ghost layers

!
!-- I/O buffer with transposed horizontal index order (z,x,y)
    REAL(KIND=wp), DIMENSION(nzb_soil:nzt_soil,lb%nxl:lb%nxr,lb%nys:lb%nyn) ::  array_3d

!
!-- Register the array in the file-header metadata (written to file on close)
    array_names(header_arr_index)  = name
    array_offset(header_arr_index) = array_position
    header_arr_index = header_arr_index + 1

    IF ( include_total_domain_boundaries)  THEN
!
!--    Prepare output of 3d-REAL-array with ghost layers.
!--    In the virtual PE grid, the first dimension is PEs along x, and the second along y.
!--    For MPI-IO it is recommended to have the index order of the array in the same way, i.e.
!--    the first dimension should be along x and the second along y.
!--    For this reason, the original PALM data need to be swapped.
       DO  i = lb%nxl, lb%nxr
          array_3d(:,i,lb%nys:lb%nyn) = data(:,lb%nys-nbgp:lb%nyn-nbgp,i-nbgp)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w3f_ob_soil ', TRIM( name ), ' ', SUM( data(:,nys:nyn,nxl:nxr) )
    ELSE
!
!--    Prepare output of 3d-REAL-array without ghost layers
       DO  i = nxl, nxr
          array_3d(:,i,lb%nys:lb%nyn) = data(:,nys:nyn,i)
       ENDDO
       IF ( debug_level >= 2 )  WRITE(9,*)  'w3f_soil ', TRIM( name ), ' ', SUM( array_3d )
    ENDIF

#if defined( __parallel )
!
!-- The soil filetype depends on the number of soil levels and is therefore created and freed
!-- for each write
    CALL rd_mpi_io_create_filetypes_3dsoil( nzb_soil, nzt_soil )
    CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_3dsoil, 'native', MPI_INFO_NULL,      &
                            ierr )
    CALL MPI_FILE_WRITE_ALL( fh, array_3d, SIZE( array_3d ), MPI_REAL, status, ierr )
    CALL MPI_TYPE_FREE( ft_3dsoil, ierr )
#else
    CALL posix_lseek( fh, array_position )
    CALL posix_write( fh, array_3d, SIZE( array_3d ) )
#endif
!
!-- Type conversion required, otherwise rigth hand side brackets are calculated assuming 4 byte INT.
!-- Maybe a compiler problem.
    array_position = array_position + INT( (nzt_soil-nzb_soil+1), KIND=rd_offset_kind ) *          &
                                      INT( (lb%ny+1), KIND=rd_offset_kind ) *                      &
                                      INT( (lb%nx+1), KIND=rd_offset_kind ) * wp

 END SUBROUTINE wrd_mpi_io_real_3d_soil



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write CHARATCTER with MPI-IO
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE wrd_mpi_io_char( name, text )

    IMPLICIT NONE

    CHARACTER(LEN=128)           ::  lo_line  !< buffered line: name in cols 1-32, text from col 33
    CHARACTER(LEN=*), INTENT(IN) ::  name     !< name of the text entry
    CHARACTER(LEN=*), INTENT(IN) ::  text     !< text to store

!
!-- Only buffered in the header arrays here; the header is written to file on close
    lo_line      = name
    lo_line(33:) = text
    text_lines(header_char_index) = lo_line
    header_char_index = header_char_index + 1

 END SUBROUTINE wrd_mpi_io_char



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write LOGICAL with MPI-IO
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_logical( name, value )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN) ::  name                !< name under which the value is recorded
   INTEGER(iwp)                 ::  logical_as_integer  !< 1 for .TRUE., 0 for .FALSE.
   LOGICAL, INTENT(IN)          ::  value               !< value to be written

!
!--  LOGICALs are stored as INTEGER, because the binary representation of LOGICAL may differ
!--  between compilers.
     IF ( value )  THEN
        logical_as_integer = 1
     ELSE
        logical_as_integer = 0
     ENDIF

     CALL wrd_mpi_io_int( name, logical_as_integer )

END SUBROUTINE wrd_mpi_io_logical



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 1d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_global_array_real_1d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)       ::  name    !< array name to look up in the file header

   INTEGER(iwp)                       ::  i       !< loop index over header entries
   INTEGER(KIND=rd_offset_kind)       ::  offset  !< zero offset used to reset the file view

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status of the read
#endif

   LOGICAL                            ::  found   !< .TRUE. if name was found in the header

   REAL(KIND=wp), INTENT(INOUT), DIMENSION(:) ::  data  !< array to be filled (same data on all PEs)

     offset = 0
     found  = .FALSE.

!
!--  Look up the file offset of this array in the header.
     DO  i = 1, tgh%nr_arrays
        IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
           array_position = array_offset(i)
           found = .TRUE.
           EXIT
        ENDIF
     ENDDO

     IF ( found )  THEN
!
!--     Set default view
#if defined( __parallel )
        CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
!
!--     All PEs read the same data collectively.
        CALL MPI_FILE_SEEK( fh, array_position, MPI_SEEK_SET, ierr )
        CALL MPI_FILE_READ_ALL( fh, data, SIZE( data ), MPI_REAL, status, ierr )
#else
        CALL posix_lseek( fh, array_position )
        CALL posix_read( fh, data, SIZE( data ) )
#endif
        IF ( debug_level >= 2)  WRITE(9,*)  'rr1f ',name,' ', SUM( data)
     ELSE
        WRITE(9,*)  'replicated array_1D not found ', name
        CALL rs_mpi_io_error( 2 )
     ENDIF

END SUBROUTINE rrd_mpi_io_global_array_real_1d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_global_array_real_2d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                      ::  name      !< array name to look up in the file header

   INTEGER, DIMENSION(1)                             ::  bufshape  !< total number of elements, shape of the 1d-view

!
!--  NOTE(review): data is INTENT(IN) but is effectively filled through the C pointer alias below —
!--  presumably declared IN only to permit C_LOC; confirm with the compiler's aliasing rules.
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:), TARGET ::  data      !< array to be filled
   REAL(KIND=wp), POINTER, DIMENSION(:)              ::  buf       !< flat 1d-view of data

   TYPE(C_PTR)                                       ::  c_data    !< C address of data

!
!--  Reuse the 1d-reader by aliasing the 2d-array with a flat 1d-pointer.
     c_data = C_LOC( data )
     bufshape(1) = SIZE( data )
     CALL C_F_POINTER( c_data, buf, bufshape )

     CALL rrd_mpi_io_global_array_real_1d( name, buf )

     IF ( debug_level >= 2 )  WRITE(9,*)  'rr2f ', TRIM( name ), ' ', bufshape(1), SUM( data )

END SUBROUTINE rrd_mpi_io_global_array_real_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 3d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_global_array_real_3d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                        ::  name      !< array name to look up in the file header

   INTEGER, DIMENSION(1)                               ::  bufshape  !< total number of elements, shape of the 1d-view

!
!--  NOTE(review): INTENT(IN) with TARGET, filled through the pointer alias — see 2d variant.
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:,:), TARGET ::  data      !< array to be filled
   REAL(KIND=wp), POINTER, DIMENSION(:)                ::  buf       !< flat 1d-view of data

   TYPE(C_PTR)                                         ::  c_data    !< C address of data

!
!--  Reuse the 1d-reader by aliasing the 3d-array with a flat 1d-pointer.
     c_data = C_LOC( data )
     bufshape(1) = SIZE( data )
     CALL C_F_POINTER( c_data, buf, bufshape )

     CALL rrd_mpi_io_global_array_real_1d( name, buf )

     IF ( debug_level >= 2 )  WRITE(9,*)  'rr3f ', TRIM( name ), ' ', bufshape(1), SUM( data )

END SUBROUTINE rrd_mpi_io_global_array_real_3d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 4d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_global_array_real_4d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                          ::  name      !< array name to look up in the file header

   INTEGER, DIMENSION(1)                                 ::  bufshape  !< total number of elements, shape of the 1d-view

!
!--  NOTE(review): INTENT(IN) with TARGET, filled through the pointer alias — see 2d variant.
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:,:,:), TARGET ::  data      !< array to be filled
   REAL(KIND=wp), POINTER, DIMENSION(:)                  ::  buf       !< flat 1d-view of data

   TYPE(C_PTR)                                           ::  c_data    !< C address of data

!
!--  Reuse the 1d-reader by aliasing the 4d-array with a flat 1d-pointer.
     c_data = C_LOC( data )
     bufshape(1) = SIZE( data)
     CALL C_F_POINTER( c_data, buf, bufshape )

     CALL rrd_mpi_io_global_array_real_1d( name, buf )

     IF ( debug_level >= 2 )  WRITE(9,*)  'rr4f ', TRIM( name ), ' ', bufshape(1), SUM( data )

END SUBROUTINE rrd_mpi_io_global_array_real_4d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 1d-INTEGER global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_global_array_int_1d( name, data, ar_found )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)       ::  name      !< array name to look up in the file header

   INTEGER(iwp)                       ::  i         !< loop index over header entries
   INTEGER(KIND=rd_offset_kind)       ::  offset    !< zero offset used to reset the file view

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status    !< MPI status of the read
#endif

   INTEGER(KIND=iwp), INTENT(INOUT), DIMENSION(:) ::  data  !< array to be filled (same data on all PEs)

   LOGICAl, OPTIONAL                  ::  ar_found  !< if present, returns whether the array was found
                                                    !< instead of aborting on a missing entry
   LOGICAL                            ::  found     !< .TRUE. if name was found in the header

     offset = 0
     found  = .FALSE.

!
!--  Look up the file offset of this array in the header.
     DO  i = 1, tgh%nr_arrays
        IF ( TRIM(array_names(i)) == TRIM( name ) )  THEN
           array_position = array_offset(i)
           found = .TRUE.
           EXIT
        ENDIF
     ENDDO

     IF ( found )  THEN
!
!--     Set default view
#if defined( __parallel )
        CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
        CALL MPI_FILE_SEEK( fh, array_position, MPI_SEEK_SET, ierr )
        CALL MPI_FILE_READ_ALL( fh, data, SIZE( data), MPI_INTEGER, status, ierr )
#else
        CALL posix_lseek( fh, array_position )
        CALL posix_read( fh, data, SIZE( data ) )
#endif
     ELSE
        IF ( PRESENT( ar_found ) )  THEN
!
!--        Missing entry is reported to the caller instead of aborting.
           ar_found =.FALSE.
           RETURN
        ELSE
!
!--        NOTE(review): the second WRITE/error pair below is presumably unreachable, since
!--        rs_mpi_io_error( 4 ) is expected to abort — confirm.
           WRITE (9,*)  '### rrd_mpi_io_global_array_int_1d ', TRIM( name )
           CALL rs_mpi_io_error( 4 )
           WRITE(9,*)  'replicated array_1D not found ', name
           CALL rs_mpi_io_error( 2 )
        ENDIF
     ENDIF

     IF ( PRESENT( ar_found ) )  ar_found =.TRUE.

END SUBROUTINE rrd_mpi_io_global_array_int_1d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 1d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_global_array_real_1d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name under which the array is recorded in the file header

   INTEGER(KIND=rd_offset_kind)       ::  offset  !< zero offset used to reset the file view

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status of the write
#endif

   REAL(KIND=wp), INTENT(IN), DIMENSION(:) ::  data  !< replicated array (identical on all PEs)

     offset = 0

!
!--  Record name and file offset of this array in the header bookkeeping arrays.
     array_names(header_arr_index)  = name
     array_offset(header_arr_index) = array_position
     header_arr_index = header_arr_index + 1

     IF ( debug_level >= 2 )  WRITE(9,*)  'wr1f ', TRIM( name ), ' ', SUM( data )
!
!--  Set default view
#if defined( __parallel )
     CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
!
!--  Only PE 0 writes replicated data
     IF ( myid == 0 )  THEN
        CALL MPI_FILE_SEEK( fh, array_position, MPI_SEEK_SET, ierr )
        CALL MPI_FILE_WRITE( fh, data, SIZE( data), MPI_REAL, status, ierr )
     ENDIF
#else
     CALL posix_lseek( fh, array_position )
     CALL posix_write( fh, data, SIZE( data ) )
#endif
!
!--  Advance the file offset by the array size in bytes (wp bytes per REAL).
     array_position = array_position + SIZE( data ) * wp

END SUBROUTINE wrd_mpi_io_global_array_real_1d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 2d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_global_array_real_2d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                      ::  name        !< name under which the array is recorded

   INTEGER, DIMENSION(1)                             ::  flat_shape  !< shape of the flattened 1d-view

   REAL(KIND=wp), POINTER, DIMENSION(:)              ::  flat_view   !< 1d-alias of data
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:), TARGET ::  data        !< replicated array (identical on all PEs)

   TYPE(C_PTR)                                       ::  data_cptr   !< C address of data

!
!--  Alias the 2d-array with a flat 1d-pointer, then delegate the actual output to the
!--  1d-writer. No data are copied; the pointer references the original storage.
     flat_shape(1) = SIZE( data)
     data_cptr     = C_LOC( data )
     CALL C_F_POINTER( data_cptr, flat_view, flat_shape )

     IF ( debug_level >= 2 )  WRITE(9,*)  'wr2f ', TRIM( name ), ' ', flat_shape(1), SUM( data )

     CALL wrd_mpi_io_global_array_real_1d( name, flat_view )

END SUBROUTINE wrd_mpi_io_global_array_real_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 3d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_global_array_real_3d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                        ::  name        !< name under which the array is recorded

   INTEGER, DIMENSION(1)                               ::  flat_shape  !< shape of the flattened 1d-view

   REAL(KIND=wp), POINTER, DIMENSION(:)                ::  flat_view   !< 1d-alias of data
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:,:), TARGET ::  data        !< replicated array (identical on all PEs)

   TYPE(C_PTR)                                         ::  data_cptr   !< C address of data

!
!--  Same flatten-and-delegate scheme as in the 2d variant, applied to a rank-3 array.
     flat_shape(1) = SIZE( data )
     data_cptr     = C_LOC( data )
     CALL C_F_POINTER( data_cptr, flat_view, flat_shape )

     IF ( debug_level >= 2 )  WRITE(9,*)  'wr3f ', TRIM( name ), ' ', flat_shape(1), SUM( data )

     CALL wrd_mpi_io_global_array_real_1d( name, flat_view )

END SUBROUTINE wrd_mpi_io_global_array_real_3d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 4d-REAL global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_global_array_real_4d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)                          ::  name      !< name under which the array is recorded

   INTEGER, DIMENSION(1)                                 ::  bufshape  !< shape of the flattened 1d-view

   REAL(KIND=wp), POINTER, DIMENSION(:)                  ::  buf       !< 1d-alias of data
   REAL(KIND=wp), INTENT(IN), DIMENSION(:,:,:,:), TARGET ::  data      !< replicated array (identical on all PEs)

   TYPE(C_PTR)                                           ::  c_data    !< C address of data

!
!--  Alias the 4d-array with a flat 1d-pointer and delegate the output to the 1d-writer.
     c_data = C_LOC( data )
     bufshape(1) = SIZE( data)
     CALL C_F_POINTER( c_data, buf, bufshape )

     IF ( debug_level >= 2 )  WRITE(9,*)  'wr4f ', TRIM( name ), ' ', bufshape(1), SUM( data )

     CALL wrd_mpi_io_global_array_real_1d( name, buf )

END SUBROUTINE wrd_mpi_io_global_array_real_4d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 1d-INTEGER global array with MPI-IO.
!> Array contains identical data on all PEs.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_global_array_int_1d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)       ::  name    !< name under which the array is recorded in the file header

   INTEGER(KIND=rd_offset_kind)       ::  offset  !< zero offset used to reset the file view

   INTEGER(KIND=iwp), INTENT(IN), DIMENSION(:) ::  data  !< replicated array (identical on all PEs)

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status  !< MPI status of the write
#endif

     offset = 0

!
!--  Record name and file offset of this array in the header bookkeeping arrays.
     array_names(header_arr_index)  = name
     array_offset(header_arr_index) = array_position
     header_arr_index = header_arr_index + 1

     IF ( debug_level >= 2 )  WRITE(9,*)  'wr1i ', TRIM( name ), ' ', SUM( data )
!
!--  Set default view
#if defined( __parallel )
     CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
!
!--  Only PE 0 writes replicated data
     IF ( myid == 0 )  THEN
!
!--     Bugfix: the seek to the recorded array offset had been commented out, so the data were
!--     written at the current (stale) individual file pointer instead of array_position.
!--     Restored for consistency with wrd_mpi_io_global_array_real_1d.
        CALL MPI_FILE_SEEK( fh, array_position, MPI_SEEK_SET, ierr )
        CALL MPI_FILE_WRITE( fh, data, SIZE( data), MPI_INTEGER, status, ierr )
     ENDIF
#else
     CALL posix_lseek( fh, array_position )
     CALL posix_write( fh, data, SIZE( data ) )
#endif
!
!--  Advance the file offset by the array size in bytes (4 bytes per INTEGER, consistent with
!--  the other INTEGER writers in this module).
     array_position = array_position + SIZE( data ) * 4

END SUBROUTINE wrd_mpi_io_global_array_int_1d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 1d-REAL surface data array with MPI-IO.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_surface( name, data, first_index )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN) ::  name            !< array name to look up in the file header

   INTEGER(KIND=rd_offset_kind) ::  disp            !< displacement of actual indices
   INTEGER(KIND=rd_offset_kind) ::  disp_f          !< displacement in file
   INTEGER(KIND=rd_offset_kind) ::  disp_n          !< displacement of next column

   INTEGER(iwp), OPTIONAL       ::  first_index     !< selects the sub-array for 2d-surface data (default 1)

   INTEGER(iwp)                 ::  i               !< loop index along x
   INTEGER(iwp)                 ::  i_f             !< x-index where the current contiguous block starts
   INTEGER(iwp)                 ::  j               !< loop index along y
   INTEGER(iwp)                 ::  j_f             !< y-index where the current contiguous block starts
   INTEGER(iwp)                 ::  lo_first_index  !< local copy of first_index
   INTEGER(iwp)                 ::  nr_bytes        !< byte count of the current (j,i) column
   INTEGER(iwp)                 ::  nr_bytes_f      !< accumulated byte count of the current contiguous block
   INTEGER(iwp)                 ::  nr_words        !< word count of the current column / block

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status    !< MPI status of the read
#endif

   LOGICAL                      ::  found           !< .TRUE. if name was found in the header

   REAL(wp), INTENT(OUT), DIMENSION(:) ::  data     !< PE-local surface data to be filled

     found          = .FALSE.
     lo_first_index = 1

!
!--  m_global_start == -1 everywhere means no surface elements on this PE.
     IF ( MAXVAL( m_global_start ) == -1 )  RETURN   ! nothing to do on this PE

     IF ( PRESENT( first_index ) )  THEN
        lo_first_index = first_index
     ENDIF

!
!--  Look up the file offset; for first_index > 1 skip the preceding sub-arrays.
     DO  i = 1, tgh%nr_arrays
        IF ( TRIM( array_names(i) ) == TRIM( name ) )  THEN
           array_position = array_offset(i) + ( lo_first_index - 1 ) *                             &
                            total_number_of_surface_values * wp
           found = .TRUE.
           EXIT
        ENDIF
     ENDDO

     disp   = -1
     disp_f = -1
     disp_n = -1

     IF ( found )  THEN
!
!--     Walk over the PE-local (j,i) columns and coalesce columns that are contiguous in the
!--     file into a single read. A block is flushed as soon as the next column does not follow
!--     immediately at disp + nr_bytes.
        DO  i = nxl, nxr
           DO  j = nys, nyn

!
!--           File displacement and size of the current column (only if it holds surface values).
              IF ( m_global_start(j,i) > 0 )  THEN
                 disp     = array_position+(m_global_start(j,i)-1) * wp
                 nr_words = m_end_index(j,i)-m_start_index(j,i)+1
                 nr_bytes = nr_words * wp
              ENDIF
!
!--           Open a new block at the first column with data.
              IF ( disp >= 0  .AND.  disp_f == -1 )  THEN   ! first Entry
                 disp_f     = disp
                 nr_bytes_f = 0
                 i_f = i
                 j_f = j
              ENDIF
!
!--           Determine the displacement of the next column (disp_n), or finish at the last
!--           (j,i) of the local subdomain.
              IF ( j == nyn  .AND.  i == nxr )  THEN        ! last Entry
                 disp_n = -1
                 IF ( nr_bytes > 0 )  THEN
                    nr_bytes_f = nr_bytes_f+nr_bytes
                 ENDIF
              ELSEIF ( j == nyn )  THEN                     ! next x
                 IF ( m_global_start(nys,i+1) > 0  .AND.  disp > 0 )  THEN
                    disp_n = array_position + ( m_global_start(nys,i+1) - 1 ) * wp
                 ELSE
                    CYCLE
                 ENDIF
              ELSE
                 IF ( m_global_start(j+1,i) > 0  .AND.  disp > 0 )  THEN
                    disp_n = array_position + ( m_global_start(j+1,i) - 1 ) * wp
                 ELSE
                    CYCLE
                 ENDIF
              ENDIF

              IF ( disp + nr_bytes == disp_n )  THEN        ! contiguous block
                 nr_bytes_f = nr_bytes_f + nr_bytes
              ELSE                                          ! read
#if defined( __parallel )
                 IF ( debug_level >= 2 )  WRITE(9,'(a,8i8)') 'read block ', j, i, j_f, i_f,        &
                                                             m_start_index(j_f,i_f), nr_bytes_f,   &
                                                             disp_f
                 CALL MPI_FILE_SEEK( fh, disp_f, MPI_SEEK_SET, ierr )
                 nr_words = nr_bytes_f / wp
                 CALL MPI_FILE_READ( fh, data(m_start_index(j_f,i_f)), nr_words, MPI_REAL, status, &
                                     ierr )
#else
!
!--              NOTE(review): the serial read is deactivated here — presumably the posix_read
!--              argument problem mentioned in the revision history; confirm before serial use.
                 CALL posix_lseek( fh, disp_f )
!                 CALL posix_read( fh, data(m_start_index(j_f:,i_f:)), nr_bytes_f )
#endif
!
!--              Start a new block at the current column.
                 disp_f     = disp
                 nr_bytes_f = nr_bytes
                 i_f = i
                 j_f = j
              ENDIF

           ENDDO
        ENDDO
     ELSE
        WRITE(9,*) 'surface array not found ', name
        CALL rs_mpi_io_error( 2 )
     ENDIF

     IF ( lo_first_index == 1 )  THEN
        IF ( debug_level >= 2  .AND.  nr_val > 0 )  WRITE(9,*)  'r_surf ', TRIM( name ), ' ',      &
                                                                nr_val, SUM( data(1:nr_val) )
     ELSE
        IF ( debug_level >= 2  .AND.  nr_val > 0 )  WRITE(9,*)  'r_surf_next ', TRIM( name ),      &
                                                                ' ', lo_first_index,nr_val,        &
                                                                SUM( data(1:nr_val) )
     ENDIF

END SUBROUTINE rrd_mpi_io_surface



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-REAL surface data array with MPI-IO.
!> These consist of multiple 1d-REAL surface data arrays.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rrd_mpi_io_surface_2d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)         ::  name  !< array name to look up in the file header

   INTEGER(iwp)                         ::  i     !< index of the 1d-sub-array

   REAL(wp), INTENT(OUT), DIMENSION(:,:)        ::  data  !< 2d-surface data to be filled
   REAL(wp), DIMENSION(SIZE( data,2))           ::  tmp   !< contiguous buffer for one 1d-sub-array

!
!--  Read each row through an explicit contiguous buffer (see comment below).
     DO  i = 1, SIZE( data,1)
        CALL rrd_mpi_io_surface( name, tmp, first_index = i )
        data(i,:) = tmp
     ENDDO

!
!--  Comment from Klaus Ketelsen (September 2018)
!--  The intention of the following loop was let the compiler do the copying on return.
!--  In my understanding is it standard conform to pass the second dimension to a 1d-
!--  array inside a subroutine and the compiler is responsible to generate code for
!--  copying. Acually this works fine for INTENT(IN) variables (wrd_mpi_io_surface_2d).
!--  For INTENT(OUT) like in this case the code works on pgi compiler. But both, the Intel 16
!--  and the Cray compiler show wrong answers using this loop.
!--  That is the reason why the above auxiliary array tmp was introduced.
!    DO  i = 1, SIZE( data,1)
!       CALL rrd_mpi_io_surface( name, data(i,:), first_index = i )
!    ENDDO

END SUBROUTINE rrd_mpi_io_surface_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Write 1d-REAL surface data array with MPI-IO.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_surface( name, data, first_index )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN) ::  name            !< name under which the array is recorded

#if defined( __parallel )
   INTEGER(KIND=rd_offset_kind) ::  disp            !< byte displacement of this PE's data in the file
#endif

   INTEGER(iwp), OPTIONAL       ::  first_index     !< selects the sub-array for 2d-surface data (default 1)

   INTEGER(iwp)                 ::  lo_first_index  !< local copy of first_index
   INTEGER(KIND=rd_offset_kind) ::  offset          !< zero offset used to reset the file view

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status    !< MPI status of the write
#endif

   REAL(wp), INTENT(IN), DIMENSION(:) ::  data      !< PE-local surface data to be written

     offset = 0
     lo_first_index = 1

     IF ( PRESENT(first_index) )  THEN
        lo_first_index = first_index
     ENDIF
!
!--  In case of 2d-data, name is writen only once
     IF ( lo_first_index == 1 )  THEN
        array_names(header_arr_index)  = name
        array_offset(header_arr_index) = array_position
        header_arr_index = header_arr_index + 1
     ENDIF

#if defined( __parallel )
     IF ( all_pes_write )  THEN
!
!--     Collective write using the surface filetype (only valid if every PE has data).
        CALL MPI_FILE_SET_VIEW( fh, array_position, MPI_REAL, ft_surf, 'native', MPI_INFO_NULL,    &
                                ierr )
        CALL MPI_FILE_WRITE_ALL( fh, data, nr_val, MPI_REAL, status, ierr )
     ELSE
!
!--     Independent writes: PEs without data skip the write.
        CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
        IF ( nr_val > 0 )  THEN
!
!--        NOTE(review): the element size is hard-coded as 8 bytes here — presumably assumes
!--        wp == 8; should match the wp used in the offset bookkeeping below; confirm.
           disp = array_position + 8 * ( glo_start - 1 )
           CALL MPI_FILE_SEEK( fh, disp, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, data, nr_val, MPI_REAL, status, ierr )
        ENDIF
     ENDIF
#else
     CALL posix_lseek( fh, array_position )
     CALL posix_write( fh, data, nr_val )
#endif
!
!--  Advance the file offset by the global size of this surface variable in bytes.
     array_position = array_position + total_number_of_surface_values * wp

     IF ( lo_first_index == 1 )  THEN
        IF ( debug_level >= 2  .AND.  nr_val > 0 )  WRITE(9,*)  'w_surf ', TRIM( name ), ' ',      &
                                                                nr_val, SUM( data(1:nr_val) )
     ELSE
        IF ( debug_level >= 2  .AND.  nr_val > 0 )  WRITE(9,*)  'w_surf_n ', TRIM( name ), ' ',    &
                                                                lo_first_index, nr_val,            &
                                                                SUM( data(1:nr_val) )
     ENDIF

END SUBROUTINE wrd_mpi_io_surface



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Read 2d-REAL surface data array with MPI-IO.
!> These consist of multiple 1d-REAL surface data arrays.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE wrd_mpi_io_surface_2d( name, data )

   IMPLICIT NONE

   CHARACTER(LEN=*), INTENT(IN)         ::  name  !< name under which the array is recorded

   INTEGER(iwp)                         ::  i     !< index of the 1d-sub-array

   REAL(wp), INTENT(IN), DIMENSION(:,:) ::  data  !< 2d-surface data to be written

!
!--  Write each row as a separate 1d-surface array; passing the section directly is safe here
!--  because the argument is INTENT(IN) (see comment in rrd_mpi_io_surface_2d).
     DO  i = 1, SIZE( data,1)
        CALL wrd_mpi_io_surface( name, data(i,:), first_index = i )
     ENDDO

END SUBROUTINE wrd_mpi_io_surface_2d



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Close restart file for MPI-IO
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rd_mpi_io_close

   IMPLICIT NONE

   INTEGER(iwp)                       ::  gh_size  !< size of the general header in bytes
   INTEGER(KIND=rd_offset_kind)       ::  offset   !< zero offset used to reset the file view

#if defined( __parallel )
   INTEGER, DIMENSION(rd_status_size) ::  status   !< MPI status of the header writes
#endif

#if ! defined( __parallel )
   TYPE(C_PTR)                        ::  buf_ptr  !< C address of the general header (posix write)
#endif

     offset = 0

     IF ( wr_flag )  THEN

!
!--     Store the final entry counts in the general header.
        tgh%nr_int    = header_int_index - 1
        tgh%nr_char   = header_char_index - 1
        tgh%nr_real   = header_real_index - 1
        tgh%nr_arrays = header_arr_index - 1
        tgh%total_nx  = lb%nx + 1
        tgh%total_ny  = lb%ny + 1
        IF ( include_total_domain_boundaries )  THEN   ! not sure, if LOGICAL interpretation is the same on all compilers,
           tgh%i_outer_bound = 1                       ! therefore store as INTEGER in general header
        ELSE
           tgh%i_outer_bound = 0
        ENDIF
!
!--     Check for big/little endian format. This check is currently not used, and could be removed
!--     if we can assume little endian as the default on all machines.
        CALL rs_mpi_io_check_endian( tgh%endian )

!
!--     Set default view
#if defined( __parallel )
        CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
#endif
!
!--     Write header file
        gh_size = storage_size(tgh) / 8
        IF ( myid == 0 )  THEN
#if defined( __parallel )
!
!--        General header.
!--        NOTE(review): gh_size is a byte count but is passed as an element count of MPI_INT,
!--        while header_position advances by gh_size bytes — confirm this is intended.
           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, tgh, gh_size, MPI_INT, status, ierr )
           header_position = header_position + gh_size
!
!--        INTEGER values
           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, int_names, SIZE( int_names )*32, MPI_CHAR, status, ierr )
           header_position = header_position + SIZE( int_names ) * 32

           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, int_values, SIZE( int_values ), MPI_INT, status, ierr )
           header_position = header_position + SIZE( int_values ) * iwp
!
!--        Character entries
           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, text_lines, SIZE( text_lines )*128, MPI_CHAR, status, ierr )
           header_position = header_position + SIZE( text_lines ) * 128
!
!---       REAL values
           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, real_names, SIZE( real_names )*32, MPI_CHAR, status, ierr )
           header_position = header_position + SIZE( real_names ) * 32

           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, real_values, SIZE( real_values ), MPI_REAL, status, ierr )
           header_position = header_position + SIZE( real_values ) * wp
!
!--        2d- and 3d- distributed array headers, all replicated array headers
           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
           CALL MPI_FILE_WRITE( fh, array_names, SIZE( array_names )*32, MPI_CHAR, status, ierr )
           header_position = header_position + SIZE( array_names ) * 32

           CALL MPI_FILE_SEEK( fh, header_position, MPI_SEEK_SET, ierr )
!
!--        NOTE(review): MPI_OFFSET_KIND is used here as the byte size of one offset entry —
!--        presumably the kind value equals the storage size in bytes; confirm.
           CALL MPI_FILE_WRITE( fh, array_offset, SIZE( array_offset )*MPI_OFFSET_KIND, MPI_BYTE,  &
                                status, ierr )  !There is no I*8 datatype in FORTRAN
           header_position = header_position + SIZE( array_offset ) * rd_offset_kind
#else
!
!--        General header (written through a C pointer, since tgh is a derived type).
           CALL posix_lseek( fh, header_position )
           buf_ptr = C_LOC( tgh )
           CALL posix_write( fh, buf_ptr, gh_size )
           header_position = header_position + gh_size
!
!--        INTEGER values
           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, int_names )
           header_position = header_position + SIZE( int_names ) * 32

           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, int_values, SIZE( int_values ) )
           header_position = header_position + SIZE( int_values ) * iwp
!
!--        Character entries
           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, text_lines )
           header_position = header_position + SIZE( text_lines ) * 128
!
!--        REAL values
           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, real_names )
           header_position = header_position + SIZE( real_names ) * 32

           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, real_values, SIZE( real_values ) )
           header_position = header_position + SIZE( real_values ) * wp
!
!--        2d- and 3d-distributed array headers, all replicated array headers
           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, array_names )
           header_position = header_position + SIZE( array_names ) * 32

           CALL posix_lseek( fh, header_position )
           CALL posix_write( fh, array_offset, SIZE( array_offset ) )
           header_position = header_position + SIZE( array_offset ) * rd_offset_kind
#endif
           IF ( debug_level >= 2 )  THEN
              WRITE(9,*)  'header position after arrays ', header_position, gh_size
           ENDIF

           IF ( print_header_now )  CALL rs_mpi_io_print_header

        ENDIF

     ENDIF

!
!--  Free file types
     CALL rs_mpi_io_free_filetypes

!
!--  Close MPI-IO file
#if defined( __parallel )
     CALL MPI_FILE_CLOSE( fh, ierr )
#else
     CALL posix_close( fh )
#endif

     mb_processed = array_position / ( 1024.0_dp * 1024.0_dp )

END SUBROUTINE rd_mpi_io_close



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> This subroutine prepares a filetype and some variables for the I/O of surface data.
!> Whenever a new set of start_index and end_index is used, rd_mpi_io_surface_filetypes has to be
!> called. A main feature of this subroutine is computing the global start indices of the 1d- and
!> 2d- surface arrays.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rd_mpi_io_surface_filetypes( start_index, end_index, data_to_write, global_start )

   IMPLICIT NONE

   INTEGER(iwp)                          ::  i           !< loop index
   INTEGER(KIND=rd_offset_kind)          ::  offset      !< zero offset used to reset the file view

   INTEGER(iwp), DIMENSION(1)            ::  dims1       !< global size of the 1d surface array
   INTEGER(iwp), DIMENSION(1)            ::  lize1       !< local size of this PE's part
   INTEGER(iwp), DIMENSION(1)            ::  start1      !< global start index of this PE's part

   INTEGER(iwp), DIMENSION(0:numprocs-1) ::  lo_nr_val   !< local number of values in x and y direction
   INTEGER(iwp), DIMENSION(0:numprocs-1) ::  all_nr_val  !< number of values for all PEs

   INTEGER, INTENT(IN), DIMENSION(nys:nyn,nxl:nxr)  ::  end_index     !< local end index of each (j,i) column
   INTEGER, INTENT(OUT), DIMENSION(nys:nyn,nxl:nxr) ::  global_start  !< global start index of each (j,i) column
   INTEGER, INTENT(IN), DIMENSION(nys:nyn,nxl:nxr)  ::  start_index   !< local start index of each (j,i) column

   LOGICAL, INTENT(OUT) ::  data_to_write  !< returns, if surface data have to be written

     offset = 0
     lo_nr_val= 0
!
!--  The local number of surface values equals the largest local end index.
     lo_nr_val(myid) = MAXVAL( end_index )
#if defined( __parallel )
!
!--  Gather the per-PE counts; each PE contributed only its own slot, so a SUM reduction
!--  yields the full vector on every PE.
     CALL MPI_ALLREDUCE( lo_nr_val, all_nr_val, numprocs, MPI_INTEGER, MPI_SUM, comm2d, ierr )
     IF ( ft_surf /= -1 )  THEN
        CALL MPI_TYPE_FREE( ft_surf, ierr )   ! if set, free last surface filetype
     ENDIF
#else
     all_nr_val(myid) = lo_nr_val(myid)
#endif
     nr_val = lo_nr_val(myid)

!
!--  Prefix sum over the per-PE counts gives this PE's global start index (1-based).
     total_number_of_surface_values = 0
     DO  i = 0, numprocs-1
        IF ( i == myid )  THEN
           glo_start = total_number_of_surface_values + 1
        ENDIF
        total_number_of_surface_values = total_number_of_surface_values + all_nr_val(i)
     ENDDO

!
!--  Actions during read
     IF ( rd_flag )  THEN
        IF ( .NOT. ALLOCATED( m_start_index ) )   ALLOCATE( m_start_index(nys:nyn,nxl:nxr)  )
        IF ( .NOT. ALLOCATED( m_end_index ) )     ALLOCATE( m_end_index(nys:nyn,nxl:nxr)    )
        IF ( .NOT. ALLOCATED( m_global_start ) )  ALLOCATE( m_global_start(nys:nyn,nxl:nxr) )
!
!--     Save arrays for later reading
        m_start_index  = start_index
        m_end_index    = end_index
        m_global_start = global_start
        nr_val = MAXVAL( end_index )

#if defined( __parallel )
        CALL MPI_FILE_SET_VIEW( fh, offset, MPI_BYTE, MPI_BYTE, 'native', MPI_INFO_NULL, ierr )
#endif
     ENDIF

!
!--  Actions during write
     IF ( wr_flag )  THEN
!
!--     Create surface filetype
        ft_surf = -1
!
!--     Shift the local start indices into the global numbering; columns without surface
!--     elements (end < start) are marked with -1.
        global_start = start_index + glo_start - 1
        WHERE ( end_index < start_index )
           global_start = -1
        ENDWHERE
!
!--     Check, if surface data exist on this PE
        data_to_write = .TRUE.
        IF ( total_number_of_surface_values == 0 )  THEN
           data_to_write = .FALSE.
           RETURN
        ENDIF

!
!--     A collective filetype can only be used if every PE holds at least one value.
        all_pes_write = ( MINVAL( all_nr_val ) > 0 )

        IF ( all_pes_write )  THEN
           dims1(1)  = total_number_of_surface_values
           lize1(1)  = nr_val
           start1(1) = glo_start-1

#if defined( __parallel )
           IF ( total_number_of_surface_values > 0 )  THEN
              CALL MPI_TYPE_CREATE_SUBARRAY( 1, dims1, lize1, start1, MPI_ORDER_FORTRAN,           &
                                             MPI_REAL, ft_surf, ierr )
              CALL MPI_TYPE_COMMIT( ft_surf, ierr )
           ENDIF
#endif
        ENDIF
     ENDIF

END SUBROUTINE rd_mpi_io_surface_filetypes



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> This subroutine creates file types to access 2d-/3d-REAL arrays and 2d-INTEGER arrays
!> distributed in blocks among processes to a single file that contains the global arrays.
!--------------------------------------------------------------------------------------------------!
SUBROUTINE rs_mpi_io_create_filetypes

   IMPLICIT NONE

   INTEGER, DIMENSION(2) ::  dims2   !< global size of the 2d-array
   INTEGER, DIMENSION(2) ::  lize2   !< local size of this PE's 2d-part
   INTEGER, DIMENSION(2) ::  start2  !< global start indices of this PE's 2d-part

   INTEGER, DIMENSION(3) ::  dims3   !< global size of the 3d-array
   INTEGER, DIMENSION(3) ::  lize3   !< local size of this PE's 3d-part
   INTEGER, DIMENSION(3) ::  start3  !< global start indices of this PE's 3d-part

!
!--  Decision, if storage with or without ghost layers.
!--  Please note that the indexing of the global array always starts at 0, even in Fortran.
!--  Therefore the PE local indices have to be shifted by nbgp in the case with ghost layers.
     IF ( include_total_domain_boundaries )  THEN

        lb%nxl = nxl + nbgp
        lb%nxr = nxr + nbgp
        lb%nys = nys + nbgp
        lb%nyn = nyn + nbgp
        lb%nnx = nnx
        lb%nny = nny
        lb%nx  = nx + 2 * nbgp
        lb%ny  = ny + 2 * nbgp
!
!--     The PEs on the domain boundaries additionally cover the outer ghost layers.
        IF ( myidx == 0 )  THEN
           lb%nxl = lb%nxl - nbgp
           lb%nnx = lb%nnx + nbgp
        ENDIF
        IF ( myidx == npex-1  .OR.  npex == -1 )  THEN   ! npex == -1 if -D__parallel not set
           lb%nxr = lb%nxr + nbgp
           lb%nnx = lb%nnx + nbgp
        ENDIF
        IF ( myidy == 0 )  THEN
           lb%nys = lb%nys - nbgp
           lb%nny = lb%nny + nbgp
        ENDIF
        IF ( myidy == npey-1  .OR.  npey == -1 )  THEN   ! npey == -1 if -D__parallel not set
           lb%nyn = lb%nyn + nbgp
           lb%nny = lb%nny + nbgp
        ENDIF

     ELSE

        lb%nxl = nxl
        lb%nxr = nxr
        lb%nys = nys
        lb%nyn = nyn
        lb%nnx = nnx
        lb%nny = nny
        lb%nx  = nx
        lb%ny  = ny

     ENDIF

!
!--  NOTE(review): filetypes_created is not set here, although rs_mpi_io_free_filetypes only
!--  frees ft_2d/ft_2di_nb/ft_3d when that flag is .TRUE. — confirm the flag is set elsewhere.
!
!--  Create filetype for 2d-REAL array with ghost layers around the total domain
     dims2(1)  = lb%nx + 1
     dims2(2)  = lb%ny + 1

     lize2(1)  = lb%nnx
     lize2(2)  = lb%nny

     start2(1) = lb%nxl
     start2(2) = lb%nys

#if defined( __parallel )
     CALL MPI_TYPE_CREATE_SUBARRAY( 2, dims2, lize2, start2, MPI_ORDER_FORTRAN, MPI_REAL, ft_2d,   &
                                    ierr )
     CALL MPI_TYPE_COMMIT( ft_2d, ierr )
#endif
!
!--  Create filetype for 2d-INTEGER array without ghost layers around the total domain
     dims2(1)  = nx + 1
     dims2(2)  = ny + 1

     lize2(1)  = nnx
     lize2(2)  = nny

     start2(1) = nxl
     start2(2) = nys

#if defined( __parallel )
     CALL MPI_TYPE_CREATE_SUBARRAY( 2, dims2, lize2, start2, MPI_ORDER_FORTRAN, MPI_INTEGER,       &
                                    ft_2di_nb, ierr )
     CALL MPI_TYPE_COMMIT( ft_2di_nb, ierr )
#endif
!
!--  Create filetype for 3d-REAL array
     dims3(1)  = nz + 2
     dims3(2)  = lb%nx + 1
     dims3(3)  = lb%ny + 1

     lize3(1)  = dims3(1)
     lize3(2)  = lb%nnx
     lize3(3)  = lb%nny

     start3(1) = nzb
     start3(2) = lb%nxl
     start3(3) = lb%nys

#if defined( __parallel )
     CALL MPI_TYPE_CREATE_SUBARRAY( 3, dims3, lize3, start3, MPI_ORDER_FORTRAN, MPI_REAL, ft_3d,   &
                                    ierr )
     CALL MPI_TYPE_COMMIT( ft_3d, ierr )
#endif

END SUBROUTINE rs_mpi_io_create_filetypes



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> This subroutine creates file types to access 3d-soil arrays
!> distributed in blocks among processes to a single file that contains the global arrays.
!> It is not required for the serial mode.
!--------------------------------------------------------------------------------------------------!
#if defined( __parallel )
 SUBROUTINE rd_mpi_io_create_filetypes_3dsoil( nzb_soil, nzt_soil )

    IMPLICIT NONE

    INTEGER, INTENT(IN)  ::  nzb_soil  !< lower vertical index of the soil grid
    INTEGER, INTENT(IN)  ::  nzt_soil  !< upper vertical index of the soil grid

    INTEGER, DIMENSION(3) ::  dims3   !< global sizes of the 3d-soil array in the file
    INTEGER, DIMENSION(3) ::  lize3   !< PE-local sizes of the 3d-soil subarray
    INTEGER, DIMENSION(3) ::  start3  !< start indices (0-based) of the local subarray

!
!-- Create filetype for 3d-soil array. The horizontal bounds (lb%...) have been set by
!-- rs_mpi_io_create_filetypes, which therefore must have been called before.
    dims3(1)  = nzt_soil - nzb_soil + 1
    dims3(2)  = lb%nx + 1
    dims3(3)  = lb%ny + 1
    lize3(1)  = dims3(1)
    lize3(2)  = lb%nnx
    lize3(3)  = lb%nny
    start3(1) = nzb_soil
    start3(2) = lb%nxl
    start3(3) = lb%nys

    CALL MPI_TYPE_CREATE_SUBARRAY( 3, dims3, lize3, start3, MPI_ORDER_FORTRAN, MPI_REAL,           &
                                   ft_3dsoil, ierr )
    CALL MPI_TYPE_COMMIT( ft_3dsoil, ierr )

 END SUBROUTINE rd_mpi_io_create_filetypes_3dsoil
#endif



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Free all file types that have been created for MPI-IO.
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rs_mpi_io_free_filetypes

    IMPLICIT NONE

#if defined( __parallel )
!
!-- ft_2d, ft_2di_nb and ft_3d only exist after rs_mpi_io_create_filetypes has been called
    IF ( filetypes_created )  THEN
       CALL MPI_TYPE_FREE( ft_2d, ierr )
       CALL MPI_TYPE_FREE( ft_2di_nb, ierr )
       CALL MPI_TYPE_FREE( ft_3d, ierr )
    ENDIF
!
!-- Free last surface filetype (ft_surf == -1 means no surface filetype was created)
!-- NOTE(review): ft_3dsoil is not freed here — TODO confirm it is freed elsewhere
    IF ( ft_surf /= -1 )  THEN
       CALL MPI_TYPE_FREE( ft_surf, ierr )
    ENDIF
#endif

 END SUBROUTINE rs_mpi_io_free_filetypes



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Print the restart data file header (MPI-IO format) for debugging.
!--------------------------------------------------------------------------------------------------!
!> Write the contents of the restart-file header (character lines, INTEGER and REAL named
!> values, and the array name/offset table) to the debug file (unit 9) and reset the
!> print_header_now flag afterwards. Output is only produced for debug_level >= 1.
 SUBROUTINE rs_mpi_io_print_header

    IMPLICIT NONE

    INTEGER(iwp) ::  i  !< loop index over header entries

    IF ( debug_level >= 1 )  THEN

       WRITE (9,*)  ' '
       WRITE (9,*)  ' CHARACTER header values ', tgh%nr_char
       WRITE (9,*)  ' '
       DO  i = 1, tgh%nr_char
          WRITE(9,*)  text_lines(i)(1:80)
       ENDDO

       WRITE (9,*)  ' '
       WRITE (9,*) ' INTEGER header values ', tgh%nr_int
       WRITE (9,*)  ' '
       DO  i = 1, tgh%nr_int
!
!--       Bugfix: former message text contained the typo "INTERGER"
          WRITE(9,*)  'INTEGER value: ', int_names(i), ' ', int_values(i)
       ENDDO

       WRITE (9,*)  ' '
       WRITE (9,*) ' REAL header values ', tgh%nr_real
       WRITE (9,*)  ' '
       DO  i = 1, tgh%nr_real
          WRITE(9,*)  'REAL value: ', real_names(i), ' ', real_values(i)
       ENDDO

       WRITE (9,*)  ' '
       WRITE (9,*) ' Header entries with Offset ',tgh%nr_arrays
       WRITE (9,*) ' Name                                  Offset '
       DO  i = 1, tgh%nr_arrays
!
!--       Bugfix: former message text contained the typo "entiy"
          WRITE(9,'(a,1x,a30,1x,i16)') 'Header entry: ', array_names(i), array_offset(i)
       ENDDO
       WRITE (9,*)  ' '
    ENDIF

!
!-- Print the header only once per opened file
    print_header_now = .FALSE.

 END SUBROUTINE rs_mpi_io_print_header



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Print error messages for reading/writing restart data with MPI-IO
!--------------------------------------------------------------------------------------------------!
!> Print an error message (on PE0 only) for reading/writing restart data with MPI-IO and
!> abort the run on all PEs.
 SUBROUTINE rs_mpi_io_error( error_number )

    IMPLICIT NONE

    INTEGER, INTENT(IN) ::  error_number  !< number of the error message (see SELECT CASE)

    IF ( myid == 0 )  THEN

       SELECT CASE ( error_number )

          CASE ( 1 )
             WRITE(6,*)  'illegal action while opening restart File'
          CASE ( 2 )
             WRITE(6,*)  'data array not found in restart File'
          CASE ( 3 )
             WRITE(6,*)  'INTEGER or REAL value not found in restart File'
          CASE ( 4 )
!
!--          Bugfix: former message text was garbled
!--          ("Arrays only array with nbgp Halos or without halos legal")
             WRITE(6,*)  'only arrays with nbgp halos or without halos are legal'
          CASE ( 5 )
             WRITE(6,*)  'outer boundary in model and outer boundary in restart file do not match'
          CASE ( 6 )
             WRITE(6,*)  'posix IO: ERROR Opening Restart File'
          CASE DEFAULT
             WRITE(6,*)  'rs_mpi_io_error: illegal error number: ', error_number

       END SELECT

    ENDIF
!
!-- Terminate the run on all PEs; the barrier gives PE0 a chance to flush the message
#if defined( __parallel )
    CALL MPI_BARRIER( comm2d, ierr )
    CALL MPI_ABORT( comm2d, 1, ierr )
#else
    CALL ABORT
#endif

 END SUBROUTINE rs_mpi_io_error



!--------------------------------------------------------------------------------------------------!
! Description:
! ------------
!> Check if big/little endian data format is used.
!> An int*4 pointer is set to a int*8 variable, the int*8 is set to 1, and then it is checked, if
!> the first 4 bytes of the pointer are equal 1 (little endian) or not.
!--------------------------------------------------------------------------------------------------!
 SUBROUTINE rs_mpi_io_check_endian( i_endian )

    IMPLICIT NONE

    INTEGER, INTENT(out)                   ::  i_endian  !< 1 = little endian, 2 = big endian

    INTEGER(KIND=8), TARGET                ::  int8      !< probe value, viewed as 8 bytes

    INTEGER, DIMENSION(1)                  ::  bufshape  !< shape argument for C_F_POINTER

    INTEGER(KIND=4), POINTER, DIMENSION(:) ::  int4      !< 4-byte view onto the storage of int8

    TYPE(C_PTR)                            ::  ptr       !< C address of int8

!
!-- Map two 4-byte integers onto the storage of the single 8-byte integer
    ptr = C_LOC( int8 )
    bufshape(1) = 2
    CALL C_F_POINTER( ptr, int4, bufshape )

    int8 = 1

!
!-- On a little-endian machine the least significant byte comes first, i.e. the first
!-- 4-byte half of int8 contains the value 1
    IF ( int4(1) == 1 )  THEN
       i_endian = 1    ! little endian
    ELSE
       i_endian = 2    ! big endian
    ENDIF

 END SUBROUTINE rs_mpi_io_check_endian

 END MODULE restart_data_mpi_io_mod