Changeset 2365 for palm/trunk
- Timestamp: Aug 21, 2017 2:59:59 PM
- Location: palm/trunk
- Files: 1 added, 17 edited
Legend:
- unmodified (context) lines are shown unmarked
- added lines are marked with "+"
- removed lines are marked with "-"
- "…" marks omitted unchanged lines
palm/trunk/SCRIPTS/mbuild
r2316 → r2365

  # -----------------
  # $Id$
+ # Added lckea & lckeam. KIT/IMK-IFU Garmisch cluster. LRZ (SadiqHuq)
+ #
+ # 2316 2017-07-20 07:53:42Z maronga
  # Removed qmake block as mrungui now runs as a python script
  #
…
  # DETERMINE IP-ADDRES OF THE REMOTE-HOST
  case $remote_host in
+ (lcbwuni)   remote_address="129.13.82.89";;
  (lcbullhh)  remote_address=136.172.50.13;;
  (lccrayb)   remote_address=130.73.233.1;;
…
  (lckiaps)   remote_address=118.128.66.223;;
  (lckyut)    remote_address=133.5.4.37;;
+ (lclrz)     remote_address=129.187.20.240;;
+ (lckea*)    remote_address=172.27.80.109;;
  (lctit)     remote_address=10.1.6.170;;
  (lcxe6)     remote_address=129.177.20.113;;
…
  (ibmkisti)  remote_address=150.183.146.24;;
  (ibmku)     remote_address=133.5.4.129;;
+ (ibmmuc)    remote_address=129.187.11.197;;
  (ibms)      remote_address=150.183.5.101;;
  (nech)      remote_address=136.172.44.192;;
…
  elif [[ $(echo $remote_host | cut -c1-3) = ibm && $(echo $string | cut -c1-3) = ibm ]]
  then
- cpp_options="${cpp_options},-D__ibm"
+ if [[ $remote_host = ibmmuc ]]
+ then
+ cpp_options="${cpp_options}"
+ else
+ cpp_options="${cpp_options},-D__ibm"
+ fi
  elif [[ $(echo $remote_host | cut -c1-3) = nec && $(echo $string | cut -c1-3) = nec ]]
  then
…
  if [[ $(echo $remote_host | cut -c1-3) = ibm ]]
  then
- cpp_options="${cpp_options},-D__$string"
+ if [[ $remote_host = ibmmuc ]]
+ then
+ cpp_options="${cpp_options} -D__parallel"
+ else
+ cpp_options="${cpp_options},-D__$string"
+ fi
  else
  cpp_options="$cpp_options -D__$string "
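The net effect of the two new ibmmuc branches is easiest to see in isolation. The following stand-alone sketch is not part of the changeset; the host name and -K value are made up for illustration. It only reproduces the decision above: on the LRZ ibmmuc host the ",-D__ibm" define is dropped and the -K conditional is appended space-separated as -D__parallel, while every other IBM host keeps the comma-separated form.

    #!/bin/bash
    # Illustrative only: mimics the ibmmuc-specific branches added in mbuild.
    remote_host=ibmmuc                 # hypothetical remote host
    string=parallel                    # hypothetical -K conditional
    cpp_options="-D__fftw"             # whatever has been collected so far

    # OS-specific define: skipped on ibmmuc, kept on all other IBM hosts
    if [[ $(echo $remote_host | cut -c1-3) = ibm ]]
    then
       if [[ $remote_host = ibmmuc ]]
       then
          cpp_options="${cpp_options}"               # no ,-D__ibm on the LRZ machine
       else
          cpp_options="${cpp_options},-D__ibm"
       fi
    fi

    # -K conditional: space-separated on ibmmuc, comma-separated elsewhere
    if [[ $(echo $remote_host | cut -c1-3) = ibm ]]
    then
       if [[ $remote_host = ibmmuc ]]
       then
          cpp_options="${cpp_options} -D__parallel"
       else
          cpp_options="${cpp_options},-D__$string"
       fi
    fi

    echo "$cpp_options"   # ibmmuc: "-D__fftw -D__parallel"; other IBM hosts: "-D__fftw,-D__ibm,-D__parallel"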
palm/trunk/SCRIPTS/mrun
r2303 → r2365

  # -----------------
  # $Id$
+ # Added lckea & lckeam. KIT/IMK-IFU Garmisch cluster. LRZ (SadiqHuq)
+ # Vertical grid nesting: set vnested_mode. -N Procs for fine and coarse grid.
+ #
+ # 2303 2017-07-04 12:26:18Z raasch
  # bugfix: setting default value for write_binary
  #
…
  module_calls=""
  mrun_script_name=$mc
+ vnested_dist=""
+ vnested_mode="vnested_twi"
  netcdf_inc=""
  netcdf_lib=""
…
  numprocs_atmos=0
  numprocs_ocean=0
+ numprocs_crse=0
+ numprocs_fine=0
  OOPT=""
  openmp=false
…
  run_coupled_model=false
  run_mode=""
+ run_vnested_model=false
  dashes=" ----------------------------------------------------------------------------"
  silent=false
…
  # READ SHELLSCRIPT-OPTIONS AND REBUILD THE MRUN-COMMAND STRING (MC),
  # WHICH WILL BE USED TO START RESTART-JOBS
- while getopts :a:bBc:Cd:Fg:G:h:H:i:kK:m:M:n:o:O:p:P:q:r:R:s:St:T:u:U:vw:xX:yY:zZ option
+ while getopts :a:bBc:Cd:Fg:G:h:H:i:kK:m:M:n:N:o:O:p:P:q:r:R:s:St:T:u:U:vw:xX:yY:zZ option
  do
  case $option in
…
  (M) makefile=$OPTARG; mc="$mc -M$OPTARG";;
  (n) node_usage=$OPTARG; mc="$mc -n$OPTARG";;
+ (N) run_vnested_model=true; vnested_dist=$OPTARG; mc="$mc -N'$OPTARG'";;
  (o) output_list=$OPTARG; mc="$mc -o'$OPTARG'";;
  (O) use_openmp=true; threads_per_task=$OPTARG; mc="$mc -O$OPTARG";;
…
  printf "\n -M Makefile name Makefile"
  printf "\n -n node usage (shared/not_shared) depending on -h"
+ printf "\n -N Vertical grid nesting. Number of"
+ printf "\n PE for Coarse and Fine grid"
  printf "\n -o OUTPUT control list \"\" "
  printf "\n -O threads per openMP task ---"
…
  fi

+ # NESTING (-N) selected and vnesting_mode specified
+ if [[ $run_vnested_model = true ]]
+ then
+
+ if [[ -n $vnested_dist ]]
+ then
+
+ numprocs_crse=`echo $vnested_dist | cut -d" " -s -f1`
+ numprocs_fine=`echo $vnested_dist | cut -d" " -s -f2`
+
+ if (( $numprocs_crse + $numprocs_fine != $numprocs ))
+ then
+
+ printf "\n +++ number of processors does not fit to specification by \"-N\"."
+ printf "\n PEs (total) : $numprocs"
+ printf "\n PEs (Coarse) : $numprocs_crse"
+ printf "\n PEs (Fine) : $numprocs_fine"
+ locat=vnesting; exit
+
+ fi
+
+ else
+
+ printf "\n +++ "Specify PE for fine and coarse grid: -N "nCGPE nFGPE \"-N\"."
+ locat=vnesting; exit
+
+ fi
+ vnested_dist=`echo "$numprocs_crse $numprocs_fine"`
+ fi
+
  # SAVE VALUES OF MRUN-OPTIONS SICHERN IN ORDER TO OVERWRITE
  # THOSE VALUES GIVEN IN THE CONFIGURATION-FILE
…
  (ibmh)      queue=cluster;;
  (ibmkisti)  queue=class.32plus;;
+ (ibmmuc*)   queue=test;;
+ (lcbwuni)   queue=develop;;
  (lcbullhh)  queue=compute;;
  (lccrayb)   queue=mpp1q;;
…
  (lckyuh)    queue=fx-single;;
  (lckyut)    queue=cx-single;;
+ (lclrz)     queue=mpp2;;
  (lctit)     queue=S;;
  (unics)     queue=unics;;
…
  # SET PREPROCESSOR-DIRECTIVES TO SELECT OPERATING SYSTEM SPECIFIC CODE
- if [[ $(echo $localhost | cut -c1-3) = ibm ]]
+ if [[ $(echo $localhost | cut -c1-3) = ibm && $localhost != ibmmuc* ]]
  then
  cpp_options="${cpp_options},-D__ibm=__ibm"
…
  # SET DIRECTIVES GIVEN BY OPTION -K (E.G. -parallel)
- if [[ $(echo $localhost | cut -c1-3) = ibm ]]
+ if [[ $(echo $localhost | cut -c1-3) = ibm && $localhost != ibmmuc ]]
  then
  [[ -n $cond1 ]] && cpp_options="${cpp_options},-D__$cond1=__$cond1"
…
  fi
  fi
- TEMPDIR=$tmp_user_catalog/${usern}.$kennung
+ if [[ $localhost = ibmmuc* ]]
+ then
+ TEMPDIR=$tmp_user_catalog/${USER}.$kennung
+ else
+ TEMPDIR=$tmp_user_catalog/${usern}.$kennung
+ fi

  # DETERMINE THE NAME OF THE DIRECTORY WHICH IS USED TO TEMPORARILY STORE DATA FOR RESTART RUNS
…
  then
  /opt/optibm/HPM_2_4_1/bin/hpmcount a.out
+ elif [[ $localhost = ibmmuc* ]]
+ then
+ ulimit -c unlimited # only for debgingg
+ echo $MP_NODES > ~/job_queue/hostfile.$kennung
+ echo $MP_PROCS >> ~/job_queue/hostfile.$kennung
+ cat $LOADL_HOSTFILE >> ~/job_queue/hostfile.$kennung
+ export MP_NODES=$nodes
+ export MP_PROCS=$numprocs
+ # export MPI_SINGLE_THREAD=no # LRZ NetCDF
+ # export MP_TASKS_PER_NODE=$tasks_per_node
+ echo "Resource Info: "
+ echo "numprocs: " $numprocs " MP_PROCS " $MP_PROCS
+ echo "nodes: " $nodes " MP_NODES " $MP_NODES
+ echo "tasks_per_node: " $tasks_per_node
+ echo "threads_per_task: " $threads_per_task
+ export OMP_NUM_THREADS=1
+ source /lrz/sys/share/modules/init/bash
+ module li
+ echo "runfile_atmos"
+
+ if [[ $run_vnested_model = true ]]
+ then
+
+ printf "\n Nested run ($numprocs_crse Coarse, $numprocs_fine Fine)"
+ printf "\n using $nested_mode nesting"
+ printf "\n\n"
+
+ echo "$vnested_mode $numprocs_crse $numprocs_fine" > runfile_atmos
+
+ poe ./a.out < runfile_atmos
+ # mpiexec -n $numprocs ./a.out < runfile_atmos
+ else
+ echo "precursor_atmos" > runfile_atmos
+ poe ./a.out -proc $numprocs -nodes $nodes < runfile_atmos
+ # mpiexec -n $numprocs ./a.out < runfile_atmos
+
+ fi
  else
  if [[ $run_coupled_model = false ]]
…
  printf "\n threads per task: $threads_per_task stacksize: unlimited"
  fi
- if [[ $run_coupled_model = false ]]
+ if [[ $run_coupled_model = false && $run_vnested_model = false ]]
  then
  if [[ "$ocean_file_appendix" = true ]]
…
  then
  mpirun_rsh -hostfile $PBS_NODEFILE -np `cat $PBS_NODEFILE | wc -l` a.out < runfile_atmos
+ elif [[ $host = lclrz || $host = lcbwuni ]]
+ then
+ mpiexec -n $ii a.out < runfile_atmos $ROPTeS
+ elif [[ $host = lckea* ]]
+ then
+ srun -n $ii a.out < runfile_atmos $ROPTeS
  elif [[ $host = lckiaps ]]
  then
…
  fi

- else
+ elif [[ $run_coupled_model = true ]]
+ then
  # COUPLED RUN
  (( iia = $numprocs_atmos / $threads_per_task ))
…
  wait

+ elif [[ $run_vnested_model = true ]]
+ then
+ printf "\n Vertical Nested run ($numprocs_crse Coarse, $numprocs_fine Fine)"
+ printf "\n using $vnested_mode vnesting"
+ printf "\n\n"
+
+ echo "$vnested_mode $numprocs_crse $numprocs_fine" > runfile_atmos
+
+ if [[ $host = lcbwuni || $host = lclrz* ]]
+ then
+ mpiexec -n $ii a.out < runfile_atmos $ROPTeS
+ elif [[ $host = lckea* ]]
+ then
+ srun -n $ii a.out < runfile_atmos $ROPTeS
+ else
+ mpirun -np $numprocs ./a.out $ROPTS < runfile_atmos
+ fi
+ wait
  fi
…
  [[ "$ocean_file_appendix" = true ]] && mrun_com=${mrun_com}" -y"
  [[ $run_coupled_model = true ]] && mrun_com=${mrun_com}" -Y \"$coupled_dist\""
+ [[ $run_vnested_model = true ]] && mrun_com=${mrun_com}" -N \"$vnested_dist\""
  [[ "$check_namelist_files" = false ]] && mrun_com=${mrun_com}" -z"
  [[ "$combine_plot_fields" = false ]] && mrun_com=${mrun_com}" -Z"
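As a usage illustration (not taken from the changeset; run identifier, host and PE counts are made up), the new -N option takes the coarse- and fine-grid PE counts as one quoted argument, and the check added above aborts unless their sum equals the total PE count requested with -X:

    # Hypothetical call: 24 PEs in total, 8 for the coarse grid, 16 for the fine grid
    mrun -d example_cbl -h lclrz -K parallel -X 24 -N "8 16" -r "d3#"

    # mrun then writes the coupling information that PALM reads from standard input:
    #    vnested_twi 8 16
    # i.e. "$vnested_mode $numprocs_crse $numprocs_fine", the first line of runfile_atmos.
    # If 8 + 16 did not match the value given with -X, mrun would stop with
    # "+++ number of processors does not fit to specification by -N".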
palm/trunk/SCRIPTS/subjob
r2295 → r2365

  # -----------------
  # $Id$
+ # Added lckea & lckeam. KIT/IMK-IFU Garmisch cluster. LRZ (SadiqHuq)
+ #
+ # 2295 2017-06-27 14:25:52Z raasch
  # adjustments for using lcgeohu (cirrus @ HUB)
  #
…
  (inferno)    local_address=130.75.105.5;   local_host=lcmuk;;
  (irifi)      local_address=130.75.105.104; local_host=lcmuk;;
+ # (i*)       local_address=129.187.11.197; local_host=ibmmuc;;
  (jaboticaba) local_address=150.163.25.181; local_host=lcbr;;
+ (kea*)       local_address=172.27.80.109;  local_host=lckeal;;
  (sno)        local_address=130.75.105.113; local_host=lcmuk;;
  (kuma)       local_address=130.75.105.115; local_host=lcmuk;;
  (levanto)    local_address=130.75.105.45;  local_host=lcmuk;;
  (login*)     local_address=118.128.66.201; local_host=lckiaps;;
+ # (login*)   local_address=129.187.11.197; local_host=ibmmuc;;
+ (lm*)        local_address=129.187.11.197; local_host=ibmmuc;;
+ (lx*)        local_address=129.187.20.240; local_host=lclrz;;
+ (mpp2*)      local_address=129.187.20.105; local_host=lclrz;;
  (maestro)    local_address=130.75.105.2;   local_host=lcmuk;;
  (meller)     local_address=134.106.74.155; local_host=lcfor;;
…
  (tc*)        local_address="ocean";        local_host=lcocean;;
  (t2a*)       local_address=10.1.6.165;     local_host=lctit;;
+ (uc1n*)      local_address=129.13.82.89;   local_host=lcbwuni;;
  (urban*)     local_address=147.46.30.151   local_host=lcsb;;
  (vinessa)    local_address=130.75.105.112; local_host=lcmuk;;
…
  (ibmku)    queue=s4;       remote_address=133.5.4.129;   submcom=/usr/local/bin/llsubmit;;
  (ibms)     queue=p_normal; remote_address=150.183.5.101; submcom=/usr/lpp/LoadL/full/bin/llsubmit;;
+ (ibmmuc)                   remote_address=129.187.11.197; submcom=/usr/bin/llsubmit;;
+ (lcbwuni)  queue=develop;  remote_address=129.13.82.89;  submcom=/opt/moab/bin/msub;;
  (lcbullhh) queue=compute;  remote_address=136.172.50.13; submcom=/usr/bin/sbatch;;
  (lccrayb)  queue=mpp1testq; remote_address=130.73.233.1; submcom="/opt/moab/default/bin/msub -E";;
…
  (lckyoto)  remote_address=133.3.51.11;  submcom=/thin/local/bin/qsub;;
  (lck)      remote_address=165.132.26.61; submcom=/usr/torque/bin/qsub;;
+ (lckeal)   queue=ivy;      remote_address=172.27.80.109; submcom=/usr/bin/sbatch;;
  (lckiaps)  remote_address=118.128.66.201; submcom=/opt/pbs/default/bin/qsub;;
  (lckordi)  remote_address=210.219.61.8;  submcom=/usr/torque/bin/qsub;;
  (lckyuh)   remote_address=133.5.4.33;    submcom=/usr/bin/pjsub;;
  (lckyut)   remote_address=133.5.4.37;    submcom=/usr/bin/pjsub;;
+ (lclrz)    remote_address=129.187.20.240; submcom=/usr/bin/sbatch;;
  (lcocean)  remote_address="ocean";       submcom=qsub;;
  (lcsb)     remote_address=147.46.30.151; submcom=/usr/torque/bin/qsub;;
…
  (*) error=true;;
  esac;;
+ (ibmmuc*) case $ndq in
+ (test|micro|general|large|fat|fattest|special|tmp1|tmp2) error=false;;
+ (*) error=true;;
+ esac;;
  (lcbullhh) case $ndq in
  (compute|compute2|shared) error=false;;
  (*) error=true;;
  esac;;
+ (lcbwuni) case $ndq in
+ (develop|singlenode|multinode|verylong|fat) error=false;;
+ (*) error=true;;
+ esac;;
  (lccrayb) case $ndq in
  (dataq|mpp1q|mpp1testq|mpp2q|mpp2testq|smp1q|smp1testq|specialm1q) error=false;;
…
  (lckyut) case $ndq in
  (cx-dbg|cx-single|cx-small|cx-middle|cx-large) error=false;;
+ (*) error=true;;
+ esac;;
+ (lclrz) case $ndq in
+ (mpp1|mpp2|iuv2|myri) error=false;;
  (*) error=true;;
  esac;;
…
  then

- if [[ $remote_host != ibmkisti ]]
+ if [[ $remote_host == ibmmuc* ]]
+ then
+ cat > $job_to_send << %%END%%
+
+ #!/bin/bash
+ # @ job_type = parallel
+ # @ job_name = $job_name
+ # @ output = $remote_dayfile
+ # @ error = $remote_dayfile
+ # @ wall_clock_limit = $timestring
+ $class
+ $mcm_affinity_options
+ $task_affinity
+ $notify_user
+ # @ network.MPI = sn_all,not_shared,us
+ # @ notification = always
+ # @ energy_policy_tag = table_kit_ifu
+ # @ minimize_time_to_solution = yes
+ # @ node = $nodes
+ # @ total_tasks = $numprocs
+ # @ node_topology = island
+ # @ island_count = 1,2
+ # @ environment = LD_LIBRARY_PATH=/lrz/sys/libraries/netcdf/4.2.1.1_impi4/lib:/lrz/sys/libraries/hdf5/1.8.15/ibmmpi/lib:/lrz/sys/libraries/fftw/3.3.3/avx/lib/
+ # @ queue
+
+ %%END%%
+
+ elif [[ $remote_host != ibmkisti ]]
  then
…
  submcom="$submcom -W group_list=$group_number -N $job_name -l walltime=$timestring -l select=$nodes:ncpus=$processes_per_node:mpiprocs=$tasks_per_node:mem=${Memory}gb -l place=scatter -o $remote_dayfile -j oe -et 1 -q $queue "
  fi

+ elif [[ $remote_host = lclrz ]]
+ then
+ cat > $job_to_send << %%END%%
+ #!/bin/bash
+ #SBATCH -J $job_name
+ #SBATCH -t $timestring
+ #SBATCH -N $nodes
+ #SBATCH --ntasks-per-node=$processes_per_node
+ #SBATCH --get-user-env
+ #SBATCH -o $remote_dayfile
+ #SBATCH -e $remote_dayfile
+ #SBATCH --mail-user=${email_notification}
+ #SBATCH --clusters=$queue
+
+ $init_cmds
+
+ $module_calls
+
+ %%END%%
+
+ elif [[ $remote_host = lckea* ]]
+ then
+ keal_tasks_per_core=1
+
+ if [[ $queue = haswell || $queue = ivy* ]]
+ then
+ if (( tasks_per_node > 20 ))
+ then
+ keal_tasks_per_core=2
+ fi
+ fi
+
+ cat > $job_to_send << %%END%%
+ #!/bin/bash
+ #SBATCH -J $job_name
+ #SBATCH -t $timestring
+ #SBATCH -N $nodes
+ #SBATCH --ntasks-per-node=$processes_per_node
+ #SBATCH --ntasks-per-core=$keal_tasks_per_core
+ #SBATCH --mem-per-cpu=${memory}mb
+ #SBATCH --get-user-env
+ #SBATCH -o $remote_dayfile
+ #SBATCH -e $remote_dayfile
+ #SBATCH --mail-user=${email_notification}
+ #SBATCH --mail-type=ALL
+ #SBATCH --partition=$queue
+
+ export MV2_ENABLE_AFFINITY=0
+
+ $init_cmds
+
+ $module_calls
+
+ %%END%%
+
+ elif [[ $remote_host=lcbwuni ]]
+ then
+ if [[ $email_notification = none ]]
+ then
+ email_directive=""
+ else
+ email_directive="#PBS -M $email_notification"
+ fi
+ cat > $job_to_send << %%END%%
+ #!/bin/ksh
+ #PBS -N $job_name
+ #PBS -l walltime=$timestring
+ #PBS -l nodes=${nodes}:ppn=$processes_per_node
+ #PBS -l pmem=${memory}mb
+ #PBS -m abe
+ #PBS -o $remote_dayfile
+ #PBS -j oe
+ #PBS -q $queue
+ $email_directive
+ %%END%%

  else
…
  eval $submcom $job_on_remhost
  fi
+ elif [[ $local_host = lclrz || $local_host = lckea* ]]
+ then
+ eval $submcom $job_on_remhost
+ elif [[ $local_host = lcbwuni ]]
+ then
+ msub -q $queue $job_on_remhost
  else
  qsub $job_on_remhost
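For illustration, with made-up job settings (name, wall-clock time, node count, tasks per node, memory and e-mail address are hypothetical), the new lckea* branch would emit a batch header along these lines; --ntasks-per-core becomes 2 only because 24 tasks per node exceed the 20-core threshold checked above for the haswell/ivy partitions:

    #!/bin/bash
    #SBATCH -J palm_keal_test
    #SBATCH -t 02:00:00
    #SBATCH -N 2
    #SBATCH --ntasks-per-node=24
    #SBATCH --ntasks-per-core=2          # 24 > 20 on partition haswell, so hyperthreads are used
    #SBATCH --mem-per-cpu=1800mb
    #SBATCH --get-user-env
    #SBATCH -o palm_keal_test_dayfile
    #SBATCH -e palm_keal_test_dayfile
    #SBATCH --mail-user=user@example.org
    #SBATCH --mail-type=ALL
    #SBATCH --partition=haswell

    export MV2_ENABLE_AFFINITY=0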
palm/trunk/SOURCE/Makefile
r2359 → r2365

  # -----------------
  # $Id$
+ # Added dependencies for vertical_nesting_mod
+ #
+ # 2339 2017-08-07 13:55:26Z gronemeier
  # corrected timestamp in header
  #
…
  user_lpm_init.f90 user_lpm_set_attributes.f90 user_module.f90 \
  user_parin.f90 user_read_restart_data.f90 \
- user_spectra.f90 user_statistics.f90 virtual_flight_mod.f90 \
+ user_spectra.f90 user_statistics.f90 vertical_nesting_mod.f90 virtual_flight_mod.f90 \
  wind_turbine_model_mod.f90 write_3d_binary.f90 write_var_list.f90
…
  # The following line is needed for palm_simple_install, don't remove it!
  #to_be_replaced_by_include
-
- #BOUNDS="-Rbc" # Array bounds checking. Compromises performance seriously.
-
- F90 = ftn
- #COPT = -DMPI_REAL=MPI_DOUBLE_PRECISION -DMPI_2REAL=MPI_2DOUBLE_PRECISION -D__lc -D__parallel -D__fftw \
- # -D__netcdf -D__netcdf4 -D__netcdf4_parallel -D__nopointer
- COPT = -DMPI_REAL=MPI_DOUBLE_PRECISION -DMPI_2REAL=MPI_2DOUBLE_PRECISION -D__lc -D__parallel -D__fftw \
- -D__netcdf -D__netcdf4 -D__netcdf4_parallel
-
- #F90FLAGS = -O2 -e Fm -G2 -rm $(BOUNDS) -hnoomp
- F90FLAGS = -O2 -e F -G2 -rm $(BOUNDS) -hnoomp
- LDFLAGS = $(F90FLAGS) -dynamic
-
- # End of include
-
  .SUFFIXES:
…
  average_3d_data.o: modules.o cpulog_mod.o mod_kinds.o exchange_horiz_2d.o land_surface_model_mod.o \
  radiation_model_mod.o urban_surface_mod.o
- boundary_conds.o: modules.o mod_kinds.o pmc_interface_mod.o surface_mod.o
+ boundary_conds.o: modules.o mod_kinds.o pmc_interface_mod.o surface_mod.o vertical_nesting_mod.o
  buoyancy.o: modules.o mod_kinds.o
  calc_mean_profile.o: modules.o mod_kinds.o
…
  microphysics_mod.o model_1d_mod.o netcdf_interface_mod.o plant_canopy_model_mod.o pmc_interface_mod.o \
  radiation_model_mod.o spectra_mod.o subsidence_mod.o synthetic_turbulence_generator_mod.o \
- wind_turbine_model_mod.o urban_surface_mod.o
+ wind_turbine_model_mod.o urban_surface_mod.o vertical_nesting_mod.o
  close_file.o: modules.o mod_kinds.o netcdf_interface_mod.o
  compute_vpt.o: modules.o mod_kinds.o
…
  diffusion_v.o: modules.o mod_kinds.o surface_mod.o
  diffusion_w.o: modules.o mod_kinds.o surface_mod.o
- diffusivities.o: modules.o mod_kinds.o surface_mod.o
+ diffusivities.o: modules.o mod_kinds.o surface_mod.o vertical_nesting_mod.o
  disturb_field.o: modules.o cpulog_mod.o mod_kinds.o random_function_mod.o \
  random_generator_parallel_mod.o
…
  init_advec.o: modules.o mod_kinds.o
  init_cloud_physics.o: modules.o mod_kinds.o
- init_coupling.o: modules.o mod_kinds.o
+ init_coupling.o: modules.o mod_kinds.o vertical_nesting_mod.o
  init_dvrp.o: modules.o mod_kinds.o
- init_grid.o: modules.o mod_kinds.o advec_ws.o netcdf_interface_mod.o surface_mod.o
+ init_grid.o: modules.o mod_kinds.o advec_ws.o netcdf_interface_mod.o surface_mod.o vertical_nesting_mod.o
  init_masks.o: modules.o mod_kinds.o netcdf_interface_mod.o
  init_ocean.o: modules.o eqn_state_seawater.o mod_kinds.o
- init_pegrid.o: modules.o mod_kinds.o spectra_mod.o synthetic_turbulence_generator_mod.o
+ init_pegrid.o: modules.o mod_kinds.o spectra_mod.o synthetic_turbulence_generator_mod.o \
+ vertical_nesting_mod.o
  init_pt_anomaly.o: modules.o mod_kinds.o
  init_rankine.o: modules.o mod_kinds.o
…
  surface_layer_fluxes_mod.o: modules.o mod_kinds.o exchange_horiz_2d.o land_surface_model_mod.o \
  urban_surface_mod.o surface_mod.o
- surface_mod.o: modules.o mod_kinds.o init_pegrid.o model_1d_mod.o
+ surface_mod.o: modules.o mod_kinds.o model_1d_mod.o
  swap_timelevel.o: modules.o cpulog_mod.o mod_kinds.o land_surface_model_mod.o \
  pmc_interface_mod.o urban_surface_mod.o
…
  spectra_mod.o user_actions.o microphysics_mod.o synthetic_turbulence_generator_mod.o \
  surface_layer_fluxes_mod.o surface_mod.o \
- urban_surface_mod.o virtual_flight_mod.o wind_turbine_model_mod.o
+ urban_surface_mod.o vertical_nesting_mod.o virtual_flight_mod.o wind_turbine_model_mod.o
  time_integration_spinup.o: cpulog_mod.o modules.o land_surface_model_mod.o mod_kinds.o radiation_model_mod.o surface_layer_fluxes_mod.o urban_surface_mod.o
  time_to_string.o: mod_kinds.o
…
  user_spectra.o: modules.o mod_kinds.o spectra_mod.o user_module.o
  user_statistics.o: modules.o mod_kinds.o netcdf_interface_mod.o user_module.o
+ vertical_nesting_mod.o: modules.o mod_kinds.o surface_mod.o
  virtual_flight_mod.o: modules.o cpulog_mod.o mod_kinds.o netcdf_interface_mod.o user_init_flight.o user_flight.o
  wind_turbine_model_mod.o: modules.o cpulog_mod.o mod_kinds.o
palm/trunk/SOURCE/boundary_conds.f90
r2320 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented: exclude setting vertical velocity to zero
+ ! on fine grid (SadiqHuq)
+ !
+ ! 2320 2017-07-21 12:47:43Z suehring
  ! Remove unused control parameter large_scale_forcing from only-list
  !
…
  USE control_parameters, &
  ONLY: bc_pt_t_val, bc_q_t_val, bc_s_t_val, constant_diffusion, &
- cloud_physics, dt_3d, humidity,&
+ cloud_physics, coupling_mode, dt_3d, humidity, &
  ibc_pt_b, ibc_pt_t, ibc_q_b, ibc_q_t, ibc_s_b, ibc_s_t, &
  ibc_sa_t, ibc_uv_b, ibc_uv_t, inflow_l, inflow_n, inflow_r, &
…
  ENDIF

- IF ( .NOT. nest_domain ) THEN
- w_p(nzt:nzt+1,:,:) = 0.0_wp ! nzt is not a prognostic level (but cf. pres)
+ !
+ !-- Vertical nesting: Vertical velocity not zero at the top of the fine grid
+ IF ( .NOT. nest_domain .AND. &
+ TRIM(coupling_mode) /= 'vnested_fine' ) THEN
+ w_p(nzt:nzt+1,:,:) = 0.0_wp !< nzt is not a prognostic level (but cf. pres)
  ENDIF
palm/trunk/SOURCE/check_parameters.f90
r2354 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented: Check coupling mode. Generate file header
+ ! (SadiqHuq)
+ !
+ ! 2354 2017-08-17 10:49:36Z schwenkel
  ! Bugfix correlated to lsm_check_data_output_pr.
  ! If-statement for following checks is essential, otherwise units for lsm output
…
  USE wind_turbine_model_mod, &
  ONLY: wtm_check_parameters, wind_turbine
+ USE vertical_nesting_mod, &
+ ONLY: vnested, vnest_check_parameters
…
  !> @todo Check if any queries for other coupling modes (e.g. precursor_ocean) are missing
  IF ( coupling_mode /= 'uncoupled' .AND. &
+ coupling_mode /= 'vnested_crse' .AND. &
+ coupling_mode /= 'vnested_fine' .AND. &
  coupling_mode /= 'atmosphere_to_ocean' .AND. &
  coupling_mode /= 'ocean_to_atmosphere' ) THEN
…
  !
  !-- Check dt_coupling, restart_time, dt_restart, end_time, dx, dy, nx and ny
- IF ( coupling_mode /= 'uncoupled') THEN
+ IF ( coupling_mode /= 'uncoupled' .AND. &
+ coupling_mode(1:8) /= 'vnested_' ) THEN

  IF ( dt_coupling == 9999999.9_wp ) THEN
…
  IF ( coupling_mode == 'uncoupled' ) THEN
  coupling_string = ''
+ ELSEIF ( coupling_mode == 'vnested_crse' ) THEN
+ coupling_string = ' nested (coarse)'
+ ELSEIF ( coupling_mode == 'vnested_fine' ) THEN
+ coupling_string = ' nested (fine)'
  ELSEIF ( coupling_mode == 'atmosphere_to_ocean' ) THEN
  coupling_string = ' coupled (atmosphere)'
…
  ENDIF

+ !-- Vertical nesting: check fine and coarse grid compatibility for data exchange
+ IF ( vnested ) CALL vnest_check_parameters
+
  CALL location_message( 'finished', .TRUE. )
  !
palm/trunk/SOURCE/diffusivities.f90
r2233 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented (SadiqHuq)
+ !
+ ! 2233 2017-05-30 18:08:54Z suehring
  !
  ! 2232 2017-05-30 17:47:52Z suehring
…
  ONLY : bc_h

+ USE vertical_nesting_mod, &
+ ONLY: vnest_boundary_conds_khkm, vnest_init
+
  IMPLICIT NONE
…
  kh(:,nyn+1,:) = kh(:,nyn,:)
  ENDIF
+ !
+ !-- Vertical nesting: set fine grid eddy viscosity top boundary condition
+ IF ( vnest_init ) CALL vnest_boundary_conds_khkm

  END SUBROUTINE diffusivities
palm/trunk/SOURCE/init_coupling.f90
r2298 → r2365

  ! ------------------
  ! $Id$
+ ! Vertical nesting implemented (SadiqHuq)
+ !
+ ! 2298 2017-06-29 09:28:18Z raasch
  ! MPI2 coupling removed
  !
…
  USE pegrid

+ USE vertical_nesting_mod
+
  IMPLICIT NONE
…
  IF ( TRIM( coupling_mode ) == 'coupled_run' ) THEN
  i = 1
+ ELSEIF ( TRIM( coupling_mode ) == 'vnested_twi' ) THEN
+ i = 9
  ELSE
  i = 0
…
  i = bc_data(0)

  IF ( i == 0 ) THEN
  coupling_mode = 'uncoupled'
  !
…
  CLOSE ( 90 )
  ENDIF
+ ELSEIF ( i == 9 ) THEN
+
+ !
+ !-- Set a flag to identify runs with vertical nesting
+ vnested = .TRUE.
+
+ comm_inter = MPI_COMM_WORLD
+
+ !
+ !-- Split the total available PE's into two groups
+ !-- numprocs for Coarse and Fine Grid are specified via mrun argument -N
+ IF ( myid < bc_data(1) ) THEN
+ inter_color = 0
+ numprocs = bc_data(1)
+ coupling_mode = 'vnested_crse'
+ ELSE
+ inter_color = 1
+ numprocs = bc_data(2)
+ coupling_mode = 'vnested_fine'
+ ENDIF
+
+ CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, inter_color, 0, comm_palm, ierr )
+ comm2d = comm_palm
+
+ OPEN( 90, FILE='VNESTING_PORT_OPENED', FORM='FORMATTED' )
+ WRITE ( 90, '(''TRUE'')' )
+ CLOSE ( 90 )
+
  ELSE
  comm_inter = MPI_COMM_WORLD
…
  ENDIF

+ IF ( TRIM( coupling_mode ) == 'vnested_fine' ) THEN
+ !
+ !-- Set file extension for vertical nesting
+ coupling_char = '_N'
+ ENDIF
+
  END SUBROUTINE init_coupling
palm/trunk/SOURCE/init_grid.f90
r2319 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical nesting implemented (SadiqHuq)
+ !
+ ! 2319 2017-07-20 17:33:17Z suehring
  ! Remove print statements
  !
…
  canyon_height, canyon_wall_left, canyon_wall_south, &
  canyon_width_x, canyon_width_y, constant_flux_layer, &
- coupling_char, dp_level_ind_b, dz, dz_max, dz_stretch_factor, &
+ coupling_char, coupling_mode, &
+ dp_level_ind_b, dz, dz_max, dz_stretch_factor, &
  dz_stretch_level, dz_stretch_level_index, grid_level, ibc_uv_b, &
  io_blocks, io_group, inflow_l, inflow_n, inflow_r, inflow_s, &
…
  USE surface_mod, &
  ONLY: get_topography_top_index, init_bc
+
+ USE vertical_nesting_mod, &
+ ONLY: vnested, vnest_init_grid

  IMPLICIT NONE
…
  CALL exchange_horiz( l_wall, nbgp )

+ !
+ !-- Vertical nesting: communicate vertical grid level arrays between fine and
+ !-- coarse grid
+ IF ( vnested ) CALL vnest_init_grid

  END SUBROUTINE init_grid
palm/trunk/SOURCE/init_pegrid.f90
r2300 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical nesting implemented (SadiqHuq)
+ !
+ ! 2300 2017-06-29 13:31:14Z raasch
  ! host-specific settings removed
  !
…
  ONLY: nxl_y, nxl_yd, nxl_z, nxr_y, nxr_yd, nxr_z, nyn_x, nyn_z, nys_x,&
  nys_z, nzb_x, nzb_y, nzb_yd, nzt_x, nzt_yd, nzt_y
+
+ USE vertical_nesting_mod, &
+ ONLY: vnested, vnest_init_pegrid_domain, vnest_init_pegrid_rank

  IMPLICIT NONE
…
+ !
+ !-- Vertical nesting: store four lists that identify partner ranks to exchange
+ !-- data
+ IF ( vnested ) CALL vnest_init_pegrid_rank
+
  !
  !-- Determine sub-topologies for transpositions
  !-- Transposition from z to x:
…
  CALL MPI_TYPE_COMMIT( type_xy, ierr )

- IF ( TRIM( coupling_mode ) /= 'uncoupled' ) THEN
+ IF ( TRIM( coupling_mode ) /= 'uncoupled' .AND. .NOT. vnested ) THEN

  !
…
  ENDIF

+ !
+ !-- Store partner grid point co-ordinates as lists.
+ !-- Create custom MPI vector datatypes for contiguous data transfer
+ IF ( vnested ) CALL vnest_init_pegrid_domain

  #endif
palm/trunk/SOURCE/local_stop.f90
r2101 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical nesting implemented (SadiqHuq)
+ !
+ ! 2101 2017-01-05 16:42:31Z suehring
  !
  ! 2000 2016-08-20 18:09:15Z knoop
…
  ONLY: nested_run

+ USE vertical_nesting_mod, &
+ ONLY: vnested
+
  #if defined( __parallel )
  IF ( coupling_mode == 'uncoupled' ) THEN
…
  ENDIF
  ENDIF
+ ELSEIF ( coupling_mode(1:8) == 'vnested_' ) THEN
+
+ PRINT*, '+++ local_stop:'
+ PRINT*, ' model "', TRIM( coupling_mode ), '" terminated'
+ !
+ !-- Abort both coarse and fine grid
+ CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
  ELSE
palm/trunk/SOURCE/parin.f90
r2339 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting: add vnest_start_time to d3par (SadiqHuq)
+ !
+ ! 2339 2017-08-07 13:55:26Z gronemeier
  ! corrected timestamp in header
  !
…
  ONLY: wtm_parin

+ USE vertical_nesting_mod, &
+ ONLY: vnest_start_time

  IMPLICIT NONE
…
  skip_time_do2d_xy, skip_time_do2d_xz, skip_time_do2d_yz, &
  skip_time_do3d, skip_time_domask, synchronous_exchange, &
- termination_time_needed, z_max_do2d
+ termination_time_needed, vnest_start_time, z_max_do2d
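Since vnest_start_time is now part of the d3par namelist, it can be set directly in the run's runtime-parameter file. A minimal, hypothetical fragment (file name and values are invented for illustration) could be appended like this; the parameter delays initialization of the fine grid until the given simulated time:

    # Append a d3par block to a (hypothetical) PALM parameter file
    cat >> example_cbl_p3d << 'EOF'
     &d3par  end_time         = 3600.0,
             vnest_start_time =  600.0,  /
    EOF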
palm/trunk/SOURCE/read_var_list.f90
r2339 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented (SadiqHuq)
+ !
+ ! 2339 2017-08-07 13:55:26Z gronemeier
  ! corrected timestamp in header
  !
…
  USE synthetic_turbulence_generator_mod, &
  ONLY: stg_read_restart_data
+
+ USE vertical_nesting_mod, &
+ ONLY: vnest_init

  IMPLICIT NONE
…
  READ ( 13 ) vg_vertical_gradient_level_ind
  CASE ( 'virtual_flight' )
  READ ( 13 ) virtual_flight
+ CASE ( 'vnest_init' )
+ READ ( 13 ) vnest_init
  CASE ( 'volume_flow_area' )
  READ ( 13 ) volume_flow_area
palm/trunk/SOURCE/time_integration.f90
r2320 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented (SadiqHuq)
+ !
+ ! 2320 2017-07-21 12:47:43Z suehring
  ! Set bottom boundary conditions after nesting interpolation and anterpolation
  !
…
  ONLY: wind_turbine, wtm_forces

+ USE vertical_nesting_mod, &
+ ONLY: vnested, vnest_anterpolate, vnest_anterpolate_e, &
+ vnest_boundary_conds, vnest_boundary_conds_khkm, &
+ vnest_deallocate, vnest_init, vnest_init_fine, &
+ vnest_start_time
+
  IMPLICIT NONE
…
  !-- Data exchange between coupled models in case that a call has been omitted
  !-- at the end of the previous run of a job chain.
- IF ( coupling_mode /= 'uncoupled' .AND. run_coupled ) THEN
+ IF ( coupling_mode /= 'uncoupled' .AND. run_coupled .AND. .NOT. vnested) THEN
  !
  !-- In case of model termination initiated by the local model the coupler
…
  CALL cpu_log( log_point_s(10), 'timesteps', 'start' )
  !
+ !-- Vertical nesting: initialize fine grid
+ IF ( vnested ) THEN
+ IF ( .NOT. vnest_init .AND. simulated_time >= vnest_start_time ) THEN
+ CALL cpu_log( log_point(80), 'vnest_init', 'start' )
+ CALL vnest_init_fine
+ vnest_init = .TRUE.
+ CALL cpu_log( log_point(80), 'vnest_init', 'stop' )
+ ENDIF
+ ENDIF
+ !
  !-- Determine ug, vg and w_subs in dependence on data from external file
  !-- LSF_DATA
…
  !-- Swap the time levels in preparation for the next time step.
  CALL swap_timelevel

+ !
+ !-- Vertical nesting: Interpolate fine grid data to the coarse grid
+ IF ( vnest_init ) THEN
+ CALL cpu_log( log_point(81), 'vnest_anterpolate', 'start' )
+ CALL vnest_anterpolate
+ CALL cpu_log( log_point(81), 'vnest_anterpolate', 'stop' )
+ ENDIF
+
  IF ( nested_run ) THEN
…
  IF ( intermediate_timestep_count == 1 .OR. &
  call_psolver_at_all_substeps ) THEN
- CALL pres
+
+ IF ( vnest_init ) THEN
+ !
+ !-- Compute pressure in the CG, interpolate top boundary conditions
+ !-- to the FG and then compute pressure in the FG
+ IF ( coupling_mode == 'vnested_crse' ) CALL pres
+
+ CALL cpu_log( log_point(82), 'vnest_bc', 'start' )
+ CALL vnest_boundary_conds
+ CALL cpu_log( log_point(82), 'vnest_bc', 'stop' )
+
+ IF ( coupling_mode == 'vnested_fine' ) CALL pres
+
+ !-- Anterpolate TKE, satisfy Germano Identity
+ CALL cpu_log( log_point(83), 'vnest_anter_e', 'start' )
+ CALL vnest_anterpolate_e
+ CALL cpu_log( log_point(83), 'vnest_anter_e', 'stop' )
+
+ ELSE
+
+ CALL pres
+
+ ENDIF
+
  ENDIF
…
  !
  !-- Data exchange between coupled models
- IF ( coupling_mode /= 'uncoupled' .AND. run_coupled ) THEN
+ IF ( coupling_mode /= 'uncoupled' .AND. run_coupled &
+ .AND. .NOT. vnested ) THEN
  time_coupling = time_coupling + dt_3d
…
  ENDDO ! time loop

+ !-- Vertical nesting: Deallocate variables initialized for vertical nesting
+ IF ( vnest_init ) CALL vnest_deallocate
+
  IF ( myid == 0 ) CALL finish_progress_bar
palm/trunk/SOURCE/timestep.f90
r2258 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical grid nesting: Sync fine and coarse grid timestep (SadiqHuq)
+ !
+ ! 2258 2017-06-08 07:55:13Z suehring
  ! Bugfix, add pre-preprocessor directives to enable non-parrallel mode
  !
…
  ONLY: flow_statistics_called, hom, u_max, u_max_ijk, v_max, v_max_ijk,&
  w_max, w_max_ijk
+
+ USE vertical_nesting_mod, &
+ ONLY: vnested, vnest_timestep_sync

  IMPLICIT NONE
…
  ENDIF

+ !
+ !-- Vertical nesting: coarse and fine grid timestep has to be identical
+ IF ( vnested ) CALL vnest_timestep_sync
+
  CALL cpu_log( log_point(12), 'calculate_timestep', 'stop' )
palm/trunk/SOURCE/write_var_list.f90
r2339 → r2365

  ! -----------------
  ! $Id$
+ ! Vertical nesting implemented (SadiqHuq)
+ !
+ ! 2339 2017-08-07 13:55:26Z gronemeier
  ! corrected timestamp in header
  !
…
  v_max_ijk, w_max, w_max_ijk

+ USE vertical_nesting_mod, &
+ ONLY: vnest_init

  IMPLICIT NONE
…
  WRITE ( 14 ) 'virtual_flight '
  WRITE ( 14 ) virtual_flight
+ WRITE ( 14 ) 'vnest_init '
+ WRITE ( 14 ) vnest_init
  WRITE ( 14 ) 'volume_flow_area '
  WRITE ( 14 ) volume_flow_area
palm/trunk/UTIL/combine_plot_fields.f90
r1809 → r2365

  ! -----------------
  !
  !
  ! Former revisions:
  ! -----------------
  ! $Id$
+ ! Vertical grid nesting implemented (SadiqHuq)
+ !
+ ! 1809 2016-04-05 20:13:28Z raasch
  !
  ! 1808 2016-04-05 19:44:00Z raasch
…
  LOGICAL :: avs_output, compressed, found, iso2d_output, netcdf_output, &
  netcdf_parallel, netcdf_0, netcdf_1
+ LOGICAL :: vnest

  REAL(wp) :: cpu_start_time, cpu_end_time, dx, simulated_time
…
  REAL(spk), DIMENSION(:,:,:), ALLOCATABLE :: pf3d, pf3d_tmp

  PRINT*, ''
  PRINT*, ''
…
  !-- Find out if a coupled run has been carried out
  INQUIRE( FILE='COUPLING_PORT_OPENED', EXIST=found )
+ INQUIRE( FILE='VNESTING_PORT_OPENED', EXIST=vnest )
  IF ( found ) THEN
  models = 2
  PRINT*, ' coupled run'
+ ELSEIF ( vnest ) THEN
+ models = 2
+ PRINT*, ' Vertically nested grid coupling'
  ELSE
  models = 1
…
  PRINT*, '*** combine_plot_fields ***'
  IF ( model == 2 ) THEN
- model_string = '_O'
- PRINT*, ' now combining ocean data'
- PRINT*, ' ========================'
+ IF ( vnest ) THEN
+ model_string = '_N'
+ PRINT*, ' now combining FINE data'
+ PRINT*, ' ========================'
+ ELSE
+ model_string = '_O'
+ PRINT*, ' now combining ocean data'
+ PRINT*, ' ========================'
+ ENDIF
  ELSE
- PRINT*, ' now combining atmosphere data'
- PRINT*, ' ============================='
+ IF ( vnest ) THEN
+ PRINT*, ' now combining COARSE data'
+ PRINT*, ' ============================='
+ ELSE
+ PRINT*, ' now combining atmosphere data'
+ PRINT*, ' ============================='
+ ENDIF
  ENDIF
  ENDIF
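The flag file written by init_coupling is what ties the pieces together at post-processing time: a vertically nested run leaves VNESTING_PORT_OPENED in the temporary working directory, and the fine-grid model writes its output with the '_N' suffix set via coupling_char, which is exactly what combine_plot_fields now checks for. A quick shell check of a finished run directory (the path and file names below are hypothetical) could look like this:

    # Hypothetical check inside the temporary working directory of a finished job
    if [[ -f VNESTING_PORT_OPENED ]]; then
       echo "vertically nested run: fine-grid output carries the _N suffix"
       ls -l *_N 2>/dev/null          # fine-grid files (coupling_char = '_N')
    else
       echo "not a vertically nested run: combine_plot_fields processes one model only"
    fi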