Changeset 206 for palm/trunk/SOURCE


Timestamp: Oct 13, 2008 2:59:11 PM
Author: raasch
Message: ocean-atmosphere coupling realized with MPI-1, adjustments in mrun, mbuild, subjob for lcxt4
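The new routine init_coupling appears below only as an added file; its contents are not part of this changeset view. As a rough, self-contained sketch of how an MPI-1 coupling of this kind can be set up (split MPI_COMM_WORLD into an atmosphere part and an ocean part, connect the two with an intercommunicator, and let every PE address its counterpart as target_id), the following illustration may help. Apart from the names myid, numprocs, comm_palm, comm_inter, and target_id, which occur in the diffs below, everything here (the program name, the even split of MPI_COMM_WORLD, the 1:1 mapping of PEs) is an assumption and not the actual PALM code.

    !-- Hypothetical sketch (NOT the actual init_coupling): split MPI_COMM_WORLD
    !-- into an atmosphere half and an ocean half and connect them with an
    !-- intercommunicator. Assumes an even number of PEs, mapped 1:1.
    PROGRAM coupling_sketch

       USE mpi

       IMPLICIT NONE

       INTEGER ::  color, comm_inter, comm_palm, ierr, myid, numprocs, &
                   numprocs_half, target_id

       CALL MPI_INIT( ierr )
       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )

    !
    !-- First half of the PEs runs the atmosphere, second half the ocean
       numprocs_half = numprocs / 2
       color         = myid / numprocs_half        ! 0 = atmosphere, 1 = ocean

    !
    !-- Model-internal communicator of each half
       CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, color, myid, comm_palm, ierr )

    !
    !-- Intercommunicator between the two models; the remote leader is PE 0 of
    !-- the other half, given as a rank in MPI_COMM_WORLD
       CALL MPI_INTERCOMM_CREATE( comm_palm, 0, MPI_COMM_WORLD,                &
                                  ( 1 - color ) * numprocs_half, 1,            &
                                  comm_inter, ierr )

    !
    !-- On an intercommunicator, ranks address the remote group, so with a 1:1
    !-- mapping the partner PE simply carries the own local rank
       CALL MPI_COMM_RANK( comm_palm, myid, ierr )
       target_id = myid

       CALL MPI_FINALIZE( ierr )

    END PROGRAM coupling_sketch

With an intercommunicator of this kind, the MPI_SEND / MPI_RECV / MPI_SENDRECV calls shown in the diffs below can simply pass target_id as the rank of the partner PE in the remote model.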

Location: palm/trunk/SOURCE
Files: 1 added, 12 edited

Legend (markers used in the diffs below):

      (no marker)  Unmodified
    +              Added
    -              Removed
  • palm/trunk/SOURCE/CURRENT_MODIFICATIONS  (r198 → r206)

      New:
      ---
    + Restart runs on SGI-ICE are working (mrun).
    + 2d-decomposition is default on SGI-ICE systems. (init_pegrid)

    + Ocean-atmosphere coupling realized with MPI-1. mrun adjusted for this case
    + (-Y option). Adjustments in mrun, mbuild, and subjob for lcxt4.
    +
    + check_for_restart, check_parameters, init_dvrp, init_pegrid, local_stop, modules, palm, surface_coupler, timestep
    + Makefile, mrun, mbuild, subjob
    +
    + New: init_coupling
      …
      Errors:
      ------
    + Bugfix: error in zu index in case of section_xy = -1 (header)
    +
    + header
  • palm/trunk/SOURCE/Makefile  (r151 → r206)

      # Actual revisions:
      # -----------------
    - # +plant_canopy_model, inflow_turbulence
    - #
    - # +surface_coupler
    + # +init_coupling
      #
      # Former revisions:
      # -----------------
      # $Id$
    + #
    + # 151 2008-03-07 13:42:18Z raasch
    + # +plant_canopy_model, inflow_turbulence
    + # +surface_coupler
      #
      # 96 2007-06-04 08:07:41Z raasch
      …
              fft_xy.f90 flow_statistics.f90 global_min_max.f90 header.f90 \
              impact_of_latent_heat.f90 inflow_turbulence.f90 init_1d_model.f90 \
    -         init_3d_model.f90 init_advec.f90 init_cloud_physics.f90 init_dvrp.f90 \
    -         init_grid.f90 init_ocean.f90 init_particles.f90 init_pegrid.f90 \
    +         init_3d_model.f90 init_advec.f90 init_cloud_physics.f90 init_coupling.f90 \
    +         init_dvrp.f90 init_grid.f90 init_ocean.f90 init_particles.f90 init_pegrid.f90 \
              init_pt_anomaly.f90 init_rankine.f90 init_slope.f90 \
              interaction_droplets_ptq.f90 local_flush.f90 local_getenv.f90 \
      …
              flow_statistics.o global_min_max.o header.o impact_of_latent_heat.o \
              inflow_turbulence.o init_1d_model.o init_3d_model.o init_advec.o init_cloud_physics.o \
    -         init_dvrp.o init_grid.o init_ocean.o init_particles.o init_pegrid.o \
    +         init_coupling.o init_dvrp.o init_grid.o init_ocean.o init_particles.o init_pegrid.o \
              init_pt_anomaly.o init_rankine.o init_slope.o \
              interaction_droplets_ptq.o local_flush.o local_getenv.o local_stop.o \
      …
      init_advec.o: modules.o
      init_cloud_physics.o: modules.o
    + init_coupling.o: modules.o
      init_dvrp.o: modules.o
      init_grid.o: modules.o
      …
      write_compressed.o: modules.o
      write_var_list.o: modules.o
  • palm/trunk/SOURCE/check_for_restart.f90  (r110 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Implementation of an MPI-1 coupling: replaced myid with target_id
      !
      ! Former revisions:
      …
      !-- Output that job will be terminated
          IF ( terminate_run  .AND.  myid == 0 )  THEN
    -        PRINT*, '*** WARNING: run will be terminated because it is running out', &
    -                     ' of job cpu limit'
    +        PRINT*, '*** WARNING: run will be terminated because it is running', &
    +                    ' out of job cpu limit'
             PRINT*, '             remaining time:         ', remaining_time, ' s'
    -        PRINT*, '             termination time needed:', termination_time_needed,&
    -                     ' s'
    +        PRINT*, '             termination time needed:', &
    +                                termination_time_needed, ' s'
          ENDIF
      …
             terminate_coupled = 3
    -        CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER, myid,  0, &
    -                           terminate_coupled_remote, 1, MPI_INTEGER, myid,  0, &
    +        CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER,          &
    +                           target_id, 0,                                      &
    +                           terminate_coupled_remote, 1, MPI_INTEGER,          &
    +                           target_id, 0,                                      &
                                comm_inter, status, ierr )
          ENDIF
      …
                                             'settings of'
                   PRINT*, '                 restart_time / dt_restart'
    -              PRINT*, '                 new restart time is: ', time_restart, ' s'
    +              PRINT*, '                 new restart time is: ', time_restart, &
    +                                        ' s'
                ENDIF
      !
      …
      !--       informed of another termination reason (terminate_coupled > 0) before,
      !--       or vice versa (terminate_coupled_remote > 0).
    -           IF ( coupling_mode /= 'uncoupled' .AND. terminate_coupled == 0  .AND. &
    -                terminate_coupled_remote == 0)  THEN
    +           IF ( coupling_mode /= 'uncoupled' .AND. terminate_coupled == 0  &
    +                .AND.  terminate_coupled_remote == 0 )  THEN

                   IF ( dt_restart /= 9999999.9 )  THEN
      …
                      terminate_coupled = 5
                   ENDIF
    -              CALL MPI_SENDRECV(                                                 &
    -                             terminate_coupled,        1, MPI_INTEGER, myid,  0, &
    -                             terminate_coupled_remote, 1, MPI_INTEGER, myid,  0, &
    -                             comm_inter, status, ierr )
    +              CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER,    &
    +                                 target_id,  0,                               &
    +                                 terminate_coupled_remote, 1, MPI_INTEGER,    &
    +                                 target_id,  0,                               &
    +                                 comm_inter, status, ierr )
                ENDIF
             ELSE
  • palm/trunk/SOURCE/check_parameters.f90  (r198 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Implementation of an MPI-1 coupling: replaced myid with target_id,
    + ! deleted __mpi2 directives
      !
      ! Former revisions:
      …
                CALL local_stop
             ENDIF
    - #if defined( __parallel )  &&  defined( __mpi2 )
    -        CALL MPI_SEND( dt_coupling, 1, MPI_REAL, myid, 11, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 11, comm_inter, status, ierr )
    + #if defined( __parallel )
    +        CALL MPI_SEND( dt_coupling, 1, MPI_REAL, target_id, 11, comm_inter, &
    +                       ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 11, comm_inter, &
    +                       status, ierr )
             IF ( dt_coupling /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
             ENDIF
             IF ( dt_coupling <= 0.0 )  THEN
    -           CALL MPI_SEND( dt_max, 1, MPI_REAL, myid, 19, comm_inter, ierr )
    -           CALL MPI_RECV( remote, 1, MPI_REAL, myid, 19, comm_inter, status, &
    -                ierr )
    +           CALL MPI_SEND( dt_max, 1, MPI_REAL, target_id, 19, comm_inter, ierr )
    +           CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 19, comm_inter, &
    +                          status, ierr )
                dt_coupling = MAX( dt_max, remote )
                IF ( myid == 0 )  THEN
      …
                ENDIF
             ENDIF
    -        CALL MPI_SEND( restart_time, 1, MPI_REAL, myid, 12, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 12, comm_inter, status, ierr )
    +        CALL MPI_SEND( restart_time, 1, MPI_REAL, target_id, 12, comm_inter, &
    +                       ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 12, comm_inter, &
    +                       status, ierr )
             IF ( restart_time /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( dt_restart, 1, MPI_REAL, myid, 13, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 13, comm_inter, status, ierr )
    +        CALL MPI_SEND( dt_restart, 1, MPI_REAL, target_id, 13, comm_inter, &
    +                       ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 13, comm_inter, &
    +                       status, ierr )
             IF ( dt_restart /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( end_time, 1, MPI_REAL, myid, 14, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 14, comm_inter, status, ierr )
    +        CALL MPI_SEND( end_time, 1, MPI_REAL, target_id, 14, comm_inter, ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 14, comm_inter, &
    +                       status, ierr )
             IF ( end_time /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( dx, 1, MPI_REAL, myid, 15, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 15, comm_inter, status, ierr )
    +        CALL MPI_SEND( dx, 1, MPI_REAL, target_id, 15, comm_inter, ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 15, comm_inter, &
    +                       status, ierr )
             IF ( dx /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( dy, 1, MPI_REAL, myid, 16, comm_inter, ierr )
    -        CALL MPI_RECV( remote, 1, MPI_REAL, myid, 16, comm_inter, status, ierr )
    +        CALL MPI_SEND( dy, 1, MPI_REAL, target_id, 16, comm_inter, ierr )
    +        CALL MPI_RECV( remote, 1, MPI_REAL, target_id, 16, comm_inter, &
    +                       status, ierr )
             IF ( dy /= remote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( nx, 1, MPI_INTEGER, myid, 17, comm_inter, ierr )
    -        CALL MPI_RECV( iremote, 1, MPI_INTEGER, myid, 17, comm_inter, status, &
    -             ierr )
    +        CALL MPI_SEND( nx, 1, MPI_INTEGER, target_id, 17, comm_inter, ierr )
    +        CALL MPI_RECV( iremote, 1, MPI_INTEGER, target_id, 17, comm_inter, &
    +                       status, ierr )
             IF ( nx /= iremote )  THEN
                IF ( myid == 0 )  THEN
      …
                CALL local_stop
             ENDIF
    -        CALL MPI_SEND( ny, 1, MPI_INTEGER, myid, 18, comm_inter, ierr )
    -        CALL MPI_RECV( iremote, 1, MPI_INTEGER, myid, 18, comm_inter, status, &
    -             ierr )
    +        CALL MPI_SEND( ny, 1, MPI_INTEGER, target_id, 18, comm_inter, ierr )
    +        CALL MPI_RECV( iremote, 1, MPI_INTEGER, target_id, 18, comm_inter, &
    +                       status, ierr )
             IF ( ny /= iremote )  THEN
                IF ( myid == 0 )  THEN
      …
          ENDIF

    - #if defined( __parallel )  &&  defined( __mpi2 )
    + #if defined( __parallel )
      !
      !-- Exchange via intercommunicator
          IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
    -        CALL MPI_SEND( humidity, &
    -             1, MPI_LOGICAL, myid, 19, comm_inter, ierr )
    +        CALL MPI_SEND( humidity, 1, MPI_LOGICAL, target_id, 19, comm_inter, &
    +                       ierr )
          ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
    -        CALL MPI_RECV( humidity_remote, &
    -             1, MPI_LOGICAL, myid, 19, comm_inter, status, ierr )
    +        CALL MPI_RECV( humidity_remote, 1, MPI_LOGICAL, target_id, 19, &
    +                       comm_inter, status, ierr )
          ENDIF
      #endif
  • palm/trunk/SOURCE/header.f90  (r200 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Bugfix: error in zu index in case of section_xy = -1
      !
      ! Former revisions:
      …
                      slices = TRIM( slices ) // TRIM( section_chr ) // '/'

    -                 WRITE (coor_chr,'(F10.1)')  zu(section(i,1))
    +                 IF ( section(i,1) == -1 )  THEN
    +                    WRITE (coor_chr,'(F10.1)')  -1.0
    +                 ELSE
    +                    WRITE (coor_chr,'(F10.1)')  zu(section(i,1))
    +                 ENDIF
                      coor_chr = ADJUSTL( coor_chr )
                      coordinates = TRIM( coordinates ) // TRIM( coor_chr ) // '/'
  • palm/trunk/SOURCE/init_dvrp.f90  (r198 → r206)

      ! TEST: print* statements
      ! ToDo: checking of mode_dvrp for legal values is not correct
    - !
    + ! Implementation of a MPI-1 coupling: __mpi2 adjustments for MPI_COMM_WORLD
      ! Former revisions:
      ! -----------------
      …
          USE pegrid
          USE control_parameters
    +
    + !
    + !-- New coupling
    +     USE coupling

          IMPLICIT NONE
      …
          WRITE ( 9, * ) '*** myid=', myid, ' vor DVRP_SPLIT'
          CALL local_flush( 9 )
    +
    + !
    + !-- Adjustment for new MPI-1 coupling. This might be unnecessary.
    + #if defined( __mpi2 )
             CALL DVRP_SPLIT( MPI_COMM_WORLD, comm_palm )
    + #else
    +     IF ( coupling_mode /= 'uncoupled' ) THEN
    +        CALL DVRP_SPLIT( comm_inter, comm_palm )
    +     ELSE
    +        CALL DVRP_SPLIT( MPI_COMM_WORLD, comm_palm )
    +     ENDIF
    + #endif
    +
          WRITE ( 9, * ) '*** myid=', myid, ' nach DVRP_SPLIT'
          CALL local_flush( 9 )
  • palm/trunk/SOURCE/init_pegrid.f90  (r198 → r206)

      ! Actual revisions:
      ! -----------------
    + ! Implementation of a MPI-1 coupling: added __parallel within the __mpi2 part
    + ! 2d-decomposition is default on SGI-ICE systems
      ! ATTENTION: nnz_x undefined problem still has to be solved!!!!!!!!
      ! TEST OUTPUT (TO BE REMOVED) logging mpi2 ierr values
      …
      !--    Automatic determination of the topology
      !--    The default on SMP- and cluster-hosts is a 1d-decomposition along x
    -        IF ( host(1:3) == 'ibm'  .OR.  host(1:3) == 'nec'  .OR. &
    -             host(1:2) == 'lc'   .OR.  host(1:3) == 'dec' )  THEN
    +        IF ( host(1:3) == 'ibm'  .OR.  host(1:3) == 'nec'      .OR. &
    +             ( host(1:2) == 'lc'  .AND.  host(3:5) /= 'sgi' )  .OR. &
    +              host(1:3) == 'dec' )  THEN

             pdims(1) = numprocs
      …
      #endif

    + #if defined( __parallel )
      #if defined( __mpi2 )
      !
      …
          ENDIF
    + #endif

      !
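The init_pegrid change above only removes lcsgi hosts from the 1d-decomposition default; the code that actually builds the 2d processor grid is not shown in this changeset. As a generic illustration of what a 2d (x-y) decomposition looks like in MPI terms, and explicitly not of PALM's own pdims logic, a minimal sketch (only pdims and comm2d are names taken from the diffs; the rest is assumed):

    !-- Generic sketch of a 2d decomposition (illustration only, not init_pegrid)
    PROGRAM decomposition_sketch

       USE mpi

       IMPLICIT NONE

       INTEGER ::  comm2d, ierr, myid, numprocs
       INTEGER ::  pdims(2)  = 0                       ! 0 = let MPI choose both dims
       LOGICAL ::  cyclic(2) = (/ .TRUE., .TRUE. /)    ! periodic lateral boundaries

       CALL MPI_INIT( ierr )
       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )

    !
    !-- Factorize the number of PEs into a processor grid pdims(1) x pdims(2)
       CALL MPI_DIMS_CREATE( numprocs, 2, pdims, ierr )

    !
    !-- Cartesian communicator describing the 2d virtual topology
       CALL MPI_CART_CREATE( MPI_COMM_WORLD, 2, pdims, cyclic, .FALSE.,        &
                             comm2d, ierr )

       CALL MPI_COMM_RANK( comm2d, myid, ierr )
       IF ( myid == 0 )  PRINT*, 'processor grid: ', pdims(1), ' x ', pdims(2)

       CALL MPI_FINALIZE( ierr )

    END PROGRAM decomposition_sketch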
  • palm/trunk/SOURCE/local_stop.f90  (r198 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    - !
    + ! Implementation of a MPI-1 coupling: replaced myid with target_id
      !
      ! Former revisions:
      …
          USE control_parameters

    +
      #if defined( __parallel )
          IF ( coupling_mode == 'uncoupled' )  THEN
      …
                      terminate_coupled = 1
                      CALL MPI_SENDRECV( &
    -                      terminate_coupled,        1, MPI_INTEGER, myid,  0, &
    -                      terminate_coupled_remote, 1, MPI_INTEGER, myid,  0, &
    +                      terminate_coupled,        1, MPI_INTEGER, target_id,  0, &
    +                      terminate_coupled_remote, 1, MPI_INTEGER, target_id,  0, &
                           comm_inter, status, ierr )
                   ENDIF
  • palm/trunk/SOURCE/modules.f90  (r198 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! +target_id
      !
      ! Former revisions:
      …
      #endif
          CHARACTER(LEN=5)       ::  myid_char = ''
    -     INTEGER                ::  id_inflow = 0, id_recycling = 0, myid=0, npex = -1, &
    -                                npey = -1, numprocs = 1, numprocs_previous_run = -1,&
    +     INTEGER                ::  id_inflow = 0, id_recycling = 0, myid = 0,      &
    +                                target_id, npex = -1, npey = -1, numprocs = 1,  &
    +                                numprocs_previous_run = -1,                     &
                                     tasks_per_node = -9999, threads_per_task = 1
  • palm/trunk/SOURCE/palm.f90  (r198 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Initialization of coupled runs modified for MPI-1 and moved to external
    + ! subroutine init_coupling
      !
      ! Former revisions:
      …
          CALL MPI_INIT( ierr )
          CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    +     CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
          comm_palm = MPI_COMM_WORLD
          comm2d    = MPI_COMM_WORLD
    - #endif
    -
    - #if defined( __mpi2 )
    - !
    - !-- Get information about the coupling mode from the environment variable
    - !-- which has been set by the mpiexec command.
    - !-- This method is currently not used because the mpiexec command is not
    - !-- available on some machines
    - !    CALL local_getenv( 'coupling_mode', 13, coupling_mode, i )
    - !    IF ( i == 0 )  coupling_mode = 'uncoupled'
    - !    IF ( coupling_mode == 'ocean_to_atmosphere' )  coupling_char = '_O'
    -
    - !
    - !-- Get information about the coupling mode from standard input (PE0 only) and
    - !-- distribute it to the other PEs
    -     CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    -     IF ( myid == 0 )  THEN
    -        READ (*,*,ERR=10,END=10)  coupling_mode
    - 10     IF ( TRIM( coupling_mode ) == 'atmosphere_to_ocean' )  THEN
    -           i = 1
    -        ELSEIF ( TRIM( coupling_mode ) ==  'ocean_to_atmosphere' )  THEN
    -           i = 2
    -        ELSE
    -           i = 0
    -        ENDIF
    -     ENDIF
    -     CALL MPI_BCAST( i, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr )
    -     IF ( i == 0 )  THEN
    -        coupling_mode = 'uncoupled'
    -     ELSEIF ( i == 1 )  THEN
    -        coupling_mode = 'atmosphere_to_ocean'
    -     ELSEIF ( i == 2 )  THEN
    -        coupling_mode = 'ocean_to_atmosphere'
    -     ENDIF
    -     IF ( coupling_mode == 'ocean_to_atmosphere' )  coupling_char = '_O'
    +
    + !
    + !-- Initialize PE topology in case of coupled runs
    +     CALL init_coupling
      #endif
      …
          CALL cpu_log( log_point(1), 'total', 'start' )
          CALL cpu_log( log_point(2), 'initialisation', 'start' )
    +
    + !
    + !-- Open a file for debug output
    +     WRITE (myid_char,'(''_'',I4.4)')  myid
    +     OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )

      !
      …
      #if defined( __parallel )
          CALL MPI_COMM_RANK( comm_palm, myid, ierr )
    - #endif
    -
    - !
    - !-- Open a file for debug output
    -     WRITE (myid_char,'(''_'',I4.4)')  myid
    -     OPEN( 9, FILE='DEBUG'//TRIM( coupling_char )//myid_char, FORM='FORMATTED' )
    -
    - #if defined( __mpi2 )
      !
      !-- TEST OUTPUT (TO BE REMOVED)
          WRITE(9,*) '*** coupling_mode = "', TRIM( coupling_mode ), '"'
          CALL LOCAL_FLUSH( 9 )
    -     print*, '*** PE', myid, '  ', TRIM( coupling_mode )
    +     PRINT*, '*** PE', myid, ' Global target PE:', target_id, &
    +             TRIM( coupling_mode )
      #endif
      …
      #if defined( __mpi2 )
      !
    - !-- Test exchange via intercommunicator
    + !-- Test exchange via intercommunicator in case of a MPI-2 coupling
          IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
             i = 12345 + myid
      …
       END PROGRAM palm
  • palm/trunk/SOURCE/surface_coupler.f90  (r110 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Implementation of a MPI-1 Coupling: replaced myid with target_id,
    + ! deleted __mpi2 directives
      !
      ! Former revisions:
      …
          REAL    ::  simulated_time_remote

    - #if defined( __parallel )  &&  defined( __mpi2 )
    + #if defined( __parallel )

    -     CALL cpu_log( log_point(39), 'surface_coupler', 'start' )
    +        CALL cpu_log( log_point(39), 'surface_coupler', 'start' )

      !
      …
      !-- If necessary, the coupler will be called at the beginning of the next
      !-- restart run.
    -     CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER, myid,  0, &
    -                        terminate_coupled_remote, 1, MPI_INTEGER, myid,  0, &
    -                        comm_inter, status, ierr )
    +     CALL MPI_SENDRECV( terminate_coupled,        1, MPI_INTEGER, target_id,  &
    +                        0, &
    +                        terminate_coupled_remote, 1, MPI_INTEGER, target_id,  &
    +                        0, comm_inter, status, ierr )
          IF ( terminate_coupled_remote > 0 )  THEN
             IF ( myid == 0 )  THEN
      …
      !-- Exchange the current simulated time between the models,
      !-- currently just for testing
    -     CALL MPI_SEND( simulated_time, 1, MPI_REAL, myid, 11, comm_inter, ierr )
    -     CALL MPI_RECV( simulated_time_remote, 1, MPI_REAL, myid, 11, &
    +     CALL MPI_SEND( simulated_time, 1, MPI_REAL, target_id, 11, &
    +                    comm_inter, ierr )
    +     CALL MPI_RECV( simulated_time_remote, 1, MPI_REAL, target_id, 11, &
                         comm_inter, status, ierr )
          WRITE ( 9, * )  simulated_time, ' remote: ', simulated_time_remote
      …
             WRITE ( 9, * )  '*** send shf to ocean'
             CALL local_flush( 9 )
    -        CALL MPI_SEND( shf(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 12, &
    +        CALL MPI_SEND( shf(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 12, &
                            comm_inter, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

      !
      …
                WRITE ( 9, * )  '*** send qsws to ocean'
                CALL local_flush( 9 )
    -           CALL MPI_SEND( qsws(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 13, &
    +           CALL MPI_SEND( qsws(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 13, &
                     comm_inter, ierr )
    -           WRITE ( 9, * )  '    ready'
    -           CALL local_flush( 9 )
             ENDIF
      …
             WRITE ( 9, * )  '*** receive pt from ocean'
             CALL local_flush( 9 )
    -        CALL MPI_RECV( pt(0,nys-1,nxl-1), 1, type_xy, myid, 14, comm_inter, &
    -                       status, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )
    +        CALL MPI_RECV( pt(0,nys-1,nxl-1), 1, type_xy, target_id, 14, &
    +                       comm_inter, status, ierr )

      !
      …
             WRITE ( 9, * )  '*** send usws to ocean'
             CALL local_flush( 9 )
    -        CALL MPI_SEND( usws(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 15, &
    +        CALL MPI_SEND( usws(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 15, &
                            comm_inter, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

      !
      …
             WRITE ( 9, * )  '*** send vsws to ocean'
             CALL local_flush( 9 )
    -        CALL MPI_SEND( vsws(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 16, &
    +        CALL MPI_SEND( vsws(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 16, &
                            comm_inter, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

          ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
      …
             WRITE ( 9, * )  '*** receive tswst from atmosphere'
             CALL local_flush( 9 )
    -        CALL MPI_RECV( tswst(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 12, &
    +        CALL MPI_RECV( tswst(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 12, &
                            comm_inter, status, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

      !
      …
                WRITE ( 9, * )  '*** receive qswst_remote from atmosphere'
                CALL local_flush( 9 )
    -           CALL MPI_RECV( qswst_remote(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, &
    -                13, comm_inter, status, ierr )
    -           WRITE ( 9, * )  '    ready'
    -           CALL local_flush( 9 )
    +           CALL MPI_RECV( qswst_remote(nys-1,nxl-1), ngp_xy, MPI_REAL, &
    +                          target_id, 13, comm_inter, status, ierr )

                !here tswst is still the sum of atmospheric bottom heat fluxes
      …
             WRITE ( 9, * )  '*** send pt to atmosphere'
             CALL local_flush( 9 )
    -        CALL MPI_SEND( pt(nzt,nys-1,nxl-1), 1, type_xy, myid, 14, comm_inter, &
    -                       ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )
    +        CALL MPI_SEND( pt(nzt,nys-1,nxl-1), 1, type_xy, target_id, 14, &
    +                       comm_inter, ierr )

      !
      …
             WRITE ( 9, * )  '*** receive uswst from atmosphere'
             CALL local_flush( 9 )
    -        CALL MPI_RECV( uswst(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 15, &
    +        CALL MPI_RECV( uswst(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 15, &
                            comm_inter, status, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

      !
      …
             WRITE ( 9, * )  '*** receive vswst from atmosphere'
             CALL local_flush( 9 )
    -        CALL MPI_RECV( vswst(nys-1,nxl-1), ngp_xy, MPI_REAL, myid, 16, &
    +        CALL MPI_RECV( vswst(nys-1,nxl-1), ngp_xy, MPI_REAL, target_id, 16, &
                            comm_inter, status, ierr )
    -        WRITE ( 9, * )  '    ready'
    -        CALL local_flush( 9 )

      !
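Stripped of the debug output, the flux exchange in surface_coupler above is a set of mirrored send/receive pairs on comm_inter with fixed tags (11 for the simulated time, 12 to 16 for the individual surface quantities). A compilable sketch of that pattern for a single quantity follows; the tag, the coupling_mode strings, and the variable names coupling_mode, comm_inter, target_id, ngp_xy, shf, and tswst are taken from the diff, while the subroutine name and the flat array arguments are assumptions made for illustration:

    !-- Sketch of one mirrored exchange (tag 12 = surface heat flux); an
    !-- illustration of the pattern, not the PALM routine itself
    SUBROUTINE exchange_heat_flux( coupling_mode, comm_inter, target_id,       &
                                   ngp_xy, shf, tswst )

       USE mpi

       IMPLICIT NONE

       CHARACTER(LEN=*), INTENT(IN) ::  coupling_mode
       INTEGER, INTENT(IN)          ::  comm_inter, ngp_xy, target_id
       REAL, INTENT(IN)             ::  shf(ngp_xy)    ! flux on the atmosphere side
       REAL, INTENT(INOUT)          ::  tswst(ngp_xy)  ! flux received on the ocean side

       INTEGER ::  ierr
       INTEGER ::  status(MPI_STATUS_SIZE)

       IF ( coupling_mode == 'atmosphere_to_ocean' )  THEN
    !
    !--    Atmosphere side: send the flux to the partner PE of the ocean model
          CALL MPI_SEND( shf, ngp_xy, MPI_REAL, target_id, 12, comm_inter,     &
                         ierr )
       ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
    !
    !--    Ocean side: matching receive, same tag, same partner PE
          CALL MPI_RECV( tswst, ngp_xy, MPI_REAL, target_id, 12, comm_inter,   &
                         status, ierr )
       ENDIF

    END SUBROUTINE exchange_heat_flux

Because both sides issue their calls in the same fixed order with matching tags, no additional synchronization is needed; a mismatch in that order on either side would deadlock the coupled run.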
  • palm/trunk/SOURCE/timestep.f90  (r110 → r206)

      ! Actual revisions:
      ! -----------------
    - !
    + ! Implementation of a MPI-1 Coupling: replaced myid with target_id
      !
      ! Former revisions:
      …
                   terminate_coupled = 2
                   CALL MPI_SENDRECV( &
    -                    terminate_coupled,        1, MPI_INTEGER, myid,  0, &
    -                    terminate_coupled_remote, 1, MPI_INTEGER, myid,  0, &
    +                    terminate_coupled,        1, MPI_INTEGER, target_id,  0, &
    +                    terminate_coupled_remote, 1, MPI_INTEGER, target_id,  0, &
                         comm_inter, status, ierr )
                ENDIF