3. NODA Experiment - delippi/PhD-globalRadarOSSE GitHub Wiki

THE NODA EXPERIMENT

You will need to run the forecast-only workflow to aid in initial-condition creation (for the deterministic portion). Then you will switch to the cycled workflow so that all experiments are cycled, even though you will not assimilate observations. You will also need to create initial conditions for the C384 ensemble.

1. Ensure that reflectivity is in the diag table.

See "add reflectivity to the diag table" in the setup FV3GFS wiki

2. Spin up the model to make ICs

Set up the NODA-run model spin-up workflow (288-hr forecast).

!set up the directories (only do this the very first time)
cd /gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE
ksh run_NODA_ICS000.ksh

!create the initial conditions. the perturbed ICs are only for simulating rws.
cd /gpfs/hps3/emc/meso/save/Donald.E.Lippi/global-workflow-20190306/ush
bsub < fv3gfs_chgres.noda.sh
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS


!move the ICs to the proper locations and save.
mkdir -p /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300/
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300/
rm -rf enkfgdas.20180911 gdas.20180911
mv /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS/NODA-2018091100-2018092300/enkfgdas.20180911 .
mv /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS/NODA-2018091100-2018092300/gdas.20180911 .
htar -cvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018092300_ICS000.tar NODA-2018091100-2018092300

!redownload ICs
mkdir -p /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/
htar -xvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018092300_ICS000.tar

!run the realobs experiment
cd /gpfs/hps3/emc/meso/save/Donald.E.Lippi/global-workflow-20190306/sorc/gsi.fd/scripts
vi exglobal_analysis_fv3gfs.sh.ecf.NODA
ln -sf exglobal_analysis_fv3gfs.sh.ecf.NODA exglobal_analysis_fv3gfs.sh.ecf
cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300
vi NODA-2018091100-2018092300.xml [take out vrfy and arch steps]
ksh run.ksh


# --- config.base edits for the NODA spin-up experiment ---
# Archive to the emc-meso HPSS project instead of emc-global.
sed -i "s/HPSS_PROJECT=emc-global/HPSS_PROJECT=emc-meso/g" config.base
# Set FHCYC to 0 (whichever of the two values the file currently has).
sed -i "s/export FHCYC=1/export FHCYC=00/g" config.base
sed -i "s/export FHCYC=24/export FHCYC=00/g" config.base
# NOTE(review): 282 here, but the config.base diff later in this page shows
# 288 and the spin-up is described as a 288hr fcst -- confirm intended value.
sed -i "s/export FHMAX_GFS_00=240/export FHMAX_GFS_00=282/g" config.base
# Write GFS history output every 24 h instead of every 3 h.
sed -i "s/export FHOUT_GFS=3/export FHOUT_GFS=24/g" config.base
# Turn off NST analysis options.
sed -i "s/export DONST=\"YES\"/export DONST=\"NO\"/g" config.base
sed -i "s/export nst_anl=.true./export nst_anl=.false./g" config.base
# --- config.fcst edits ---
# Write restarts only at hour 288 (both possible original settings).
sed -i "s/export restart_interval=\${restart_interval_gfs:-0}/export restart_interval=288/g" config.fcst
sed -i "s/export restart_interval=6/export restart_interval=288/g" config.fcst
# Change the nonmonotonic hord advection options from 6 to 5.
sed -i "s/export hord_mt_nh_nonmono=6/export hord_mt_nh_nonmono=5/g" config.fcst
sed -i "s/export hord_xx_nh_nonmono=6/export hord_xx_nh_nonmono=5/g" config.fcst
# Use the standard diag_table rather than the DA-specific one.
# NOTE(review): the first substitution is global, so it already rewrites
# "diag_table_da_orig" to "diag_table_orig"; the second line is then a no-op.
sed -i "s/diag_table_da/diag_table/g" config.fcst
sed -i "s/diag_table_da_orig/diag_table_orig/g" config.fcst

# Longer wall-clock limits for the 288-h forecast jobs.
sed -i "s/export wtime_fcst=\"01:00:00\"/export wtime_fcst=\"06:00:00\"/g" config.fcst
# NOTE(review): this substitution replaces the string with itself (no-op
# as written) -- confirm whether a different npe_fcst was intended.
sed -i "s/export npe_fcst=$(echo \"$layout_x * $layout_y * 6\" | bc)/export npe_fcst=$(echo \"$layout_x * $layout_y * 6\" | bc)/g" config.fcst
sed -i "s/export wtime_efcs=\"03:00:00\"/export wtime_efcs=\"06:00:00\"/g" config.fcst

# --- EnKF window: forecast hours 0-288, output only at hour 288 ---
sed -i "s/export FHMIN_ENKF=3/export FHMIN_ENKF=0/g" config.base
sed -i "s/export FHMAX_ENKF=9/export FHMAX_ENKF=288/g" config.base
sed -i "s/export FHOUT=1/export FHOUT=288/g" config.base
sed -i "s/export FHOUT_ENKF=1/export FHOUT_ENKF=288/g" config.base
sed -i "s/export FHOUT_ENKF=3/export FHOUT_ENKF=288/g" config.base

# --- config.efcs: 2 members per ensemble-forecast group; restart at h 288 ---
sed -i "s/export NMEM_EFCSGRP=4/export NMEM_EFCSGRP=2/g" config.efcs
sed -i "s/export restart_interval=6/export restart_interval=288/g" config.efcs


diff --git a/ush/rocoto/setup_workflow.py b/ush/rocoto/setup_workflow.py
index d605fc3..09267d4 100755
--- a/ush/rocoto/setup_workflow.py
+++ b/ush/rocoto/setup_workflow.py
@@ -85,7 +85,7 @@ def get_gfs_cyc_dates(base):
     # Set GFS cycling dates
     hrdet = 0
     if gfs_cyc == 1:
-        hrinc = 24 - sdate.hour
+        hrinc = 288 - sdate.hour
         hrdet = edate.hour
     elif gfs_cyc == 2:
         if sdate.hour in [0, 12]:
@@ -638,9 +638,9 @@ def get_workflow_header(base):
     strings.append('\t<log verbosity="10"><cyclestr>&EXPDIR;/logs/@Y@m@[email protected]</cyclestr></log>\n')
     strings.append('\n')
     strings.append('\t<!-- Define the cycles -->\n')
-    strings.append('\t<cycledef group="first">&SDATE;     &SDATE;     06:00:00</cycledef>\n')
-    strings.append('\t<cycledef group="enkf" >&SDATE;     &EDATE;     06:00:00</cycledef>\n')
-    strings.append('\t<cycledef group="gdas" >&SDATE;     &EDATE;     06:00:00</cycledef>\n')
+    strings.append('\t<cycledef group="first">&SDATE;     &SDATE;    288:00:00</cycledef>\n')
+    strings.append('\t<cycledef group="enkf" >&SDATE;     &EDATE;    288:00:00</cycledef>\n')
+    strings.append('\t<cycledef group="gdas" >&SDATE;     &EDATE;    288:00:00</cycledef>\n')
     if base['gfs_cyc'] != 0:
         strings.append('\t<cycledef group="gfs"  >&SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS;</cycledef>\n')


diff --git a/ush/rocoto/workflow_utils.py b/ush/rocoto/workflow_utils.py
index 6bbaaf9..e6afc9a 100755
--- a/ush/rocoto/workflow_utils.py
+++ b/ush/rocoto/workflow_utils.py
@@ -209,7 +209,7 @@ def create_firstcyc_task(cdump='gdas'):

     deps = []
     data = '&EXPDIR;/logs/@Y@m@[email protected]'
-    dep_dict = {'type':'data', 'data':data, 'offset':'24:00:00'}
+    dep_dict = {'type':'data', 'data':data, 'offset':'288:00:00'}
     deps.append(rocoto.add_dependency(dep_dict))
     dep_dict = {'type':'cycleexist', 'condition':'not', 'offset':'-06:00:00'}
     deps.append(rocoto.add_dependency(dep_dict))

!save the ensemble
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300
vi efcsarch.ksh
#!/bin/ksh
# Archive the 80-member NODA ensemble ICs to HPSS, two members per transfer
# job: generate one efcsarch.grpNN.ksh script per group and submit each to
# the dev_transfer queue with bsub (40 jobs total).
nmem=1
typeset -Z3 nmem   # zero-pad the member number: mem001 ... mem080
hpss=/NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS
hsi "mkdir -p $hpss/enkfgdas.20180911/00"
cray=/gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300
cd $cray
# Remove ALL previously generated group scripts and logs. The old pattern
# "efcsarch.grp0*" only matched groups 01-09, leaving stale grp10-grp40
# files behind.
rm -f efcsarch.grp*
grp=1
typeset -Z2 grp    # zero-pad the group number: 01 ... 40
memstr=""          # member names accumulated for the current group
nmemstr=0          # number of members in the current group
while [[ $nmem -le 80 ]]; do
   memstr="$memstr mem$nmem"
   (( nmemstr = nmemstr + 1 ))
   if [[ $nmemstr -eq 2 ]]; then
# Write the group script with ">" (truncate) -- the old ">>" appended a
# second copy of the job onto any stale script before submitting it.
cat <<EOF >efcsarch.grp$grp.ksh
#!/bin/ksh
#BSUB -P FV3GFS-T2O
#BSUB -J efcsarch.grp$grp
#BSUB -W 06:00              # wall-clock time (hrs:mins)
#BSUB -n 1                  # number of tasks in job
#BSUB -R "rusage[mem=1024]" # memory
#BSUB -q "dev_transfer"     # queue
#BSUB -o efcsarch.grp$grp.log    # output file name in which %J is replaced by the job ID
hpss=$hpss
cray=$cray
cd $cray/enkfgdas.20180911/00
hsi "cd $hpss/enkfgdas.20180911/00; cput -PR $memstr"
EOF
      bsub < efcsarch.grp$grp.ksh
      (( grp = grp + 1 ))
      nmemstr=0
      memstr=""
   fi
   (( nmem = nmem +1 ))
done

ksh efcsarch.ksh

OLD (previous procedure -- superseded by the steps above; kept for reference)

mkdir -p /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS/NODA-2018091100-2018092300/
cd /gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE
vi run_NODA_288hrspinup.ksh

+ CDATE=2018091100
+ EDATE=2018092300
+ PSLOT="NODA-${CDATE}-${EDATE}"
+ RES=768
+ GFS_CYC=1
+ fv3gfs="global-workflow-20190306"

ksh run_NODA_288hrspinup.ksh
cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300

vi config.base
30c30
< HPSS_PROJECT=emc-global
---
> HPSS_PROJECT=emc-meso
210c210
<    export FHCYC=24
---
>    export FHCYC=00
224c224
< export FHMAX_GFS_00=240
---
> export FHMAX_GFS_00=288
230c230
< export FHOUT_GFS=3
---
> export FHOUT_GFS=24
246c246
< export DOHYBVAR="YES"
---
> export DOHYBVAR="NO"
265c265
< export DONST="YES"
---
> export DONST="NO"
269c269
< export nst_anl=.true.
---
> export nst_anl=.false.

rm -rf config.base.default

vi config.fcst
124c124
<      export restart_interval=${restart_interval_gfs:-0}
---
>      export restart_interval=288 #${restart_interval_gfs:-0}

Create initial conditions.

We want to create all new initial conditions using an ensemble member.

cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300
ksh run.ksh [or set up the crontab job, if rocoto isn't updating the database, you're using the wrong version of rocoto]
  • 00 - gfsgetic

Get perturbed atmosphere.

The initial conditions come from: [/NCEPPROD/hpssprod/runhistory/rh2018/201809/20180911/gpfs_hps_nco_ops_com_gfs_prod_enkf.20180911_00.anl.tar ./gdas.t00z.ratmanl.mem080.nemsio]

cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS/2018091100/gfs
htar -xvf /NCEPPROD/hpssprod/runhistory/rh2018/201809/20180911/gpfs_hps_nco_ops_com_gfs_prod_enkf.20180911_00.anl.tar ./gdas.t00z.ratmanl.mem080.nemsio
mkdir replaced
mv siganl.gfs.2018091100 replaced/.
mv ./gdas.t00z.ratmanl.mem080.nemsio ./siganl.gfs.2018091100
  • 00 - gfsfv3ic [/gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS]

Save the initial conditions to tape:

cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/FV3ICS
htar -cvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018091800_ICS000.tar 2018091100
htar -xvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018091800_ICS000.tar 2018091100

Run the 288hr spin up forecast and save the initial conditions to tape:

cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300
ksh run.ksh
  • 00 - gfsfcst [317GB @ 24hrly @ C768 and takes about 1:20 hr on WCOSS]
  • 00 - gfspost
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100
htar -cvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018091800_ICS288.tar NODA-2018091100-2018092300

3. Set up the NODA run experiment (1 week, i.e. a 168-hr forecast).

Download initial conditions

mkdir -p /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700/gdas.20180922/18/
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700/gdas.20180922/18/
htar -xvf /NCEPDEV/emc-meso/5year/Donald.E.Lippi/rw_FV3GFS/FV3ICS/NODA-2018091100-2018091800_ICS288.tar NODA-2018091100-2018092300/gfs.20180911/00/RERUN_RESTART
mv RERUN_RESTART RESTART

Modify the setup script.

cd /gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE
vi run_NODA_288hrrestart.ksh
+ CDATE=2018092300
+ EDATE=2018100700

ksh run_NODA_288hrrestart.ksh

Modify config.base and config.post

cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700
vi config.base
30c30
< HPSS_PROJECT=emc-global
---
> HPSS_PROJECT=emc-meso
208c208
<    export FHCYC=1
---
>    export FHCYC=0
210c210
<    export FHCYC=24
---
>    export FHCYC=0
215,216c215,216
< export FHMAX=9
< export FHOUT=3
---
> export FHMAX=6
> export FHOUT=6
219c219
< export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
---
> export gfs_cyc=4 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
224,227c224,227
< export FHMAX_GFS_00=240
< export FHMAX_GFS_06=180
< export FHMAX_GFS_12=180
< export FHMAX_GFS_18=180
---
> export FHMAX_GFS_00=168
> export FHMAX_GFS_06=168
> export FHMAX_GFS_12=168
> export FHMAX_GFS_18=168
230c230
< export FHOUT_GFS=3
---
> export FHOUT_GFS=1
233a234,236
> #bucket size
> export FHZER=${FHZER:-1} #6
>
246c249
< export DOHYBVAR="YES"
---
> export DOHYBVAR="NO"
265c268
< export DONST="YES"
---
> export DONST="NO"
269c272
< export nst_anl=.true.
---
> export nst_anl=.false.


vi config.post
24,27c24,27
< export GOESF=YES                             # goes image
< export GTGF=NO                               # gtg icing product
< export FLXF=YES                              # grib2 flux file written by post
< export PGB1F=YES
---
> export GOESF=NO                              # goes image
> export GTGF=NO                                # gtg icing product
> export FLXF=NO                               # grib2 flux file written by post
> export PGB1F=NO
37c37
< export downset=2
---
> export downset=1 #2

Modify the post output fields - sorc/gfs_post.fd/parm/global_1x1_paramlist_g2

4LFTX:surface
5WAVH:500 mb
ABSV:1000 mb
ABSV:100 mb
ABSV:150 mb
ABSV:200 mb
ABSV:250 mb
ABSV:300 mb
ABSV:30 mb
ABSV:350 mb
ABSV:400 mb
ABSV:450 mb
ABSV:500 mb
ABSV:50 mb
ABSV:550 mb
ABSV:600 mb
ABSV:650 mb
ABSV:700 mb
ABSV:70 mb
ABSV:750 mb
ABSV:800 mb
ABSV:850 mb
ABSV:900 mb
ABSV:925 mb
ABSV:950 mb
ABSV:975 mb
ACPCP:surface
ALBDO:surface
APCP:surface
APTMP:2 m above ground
CAPE:180-0 mb above ground
CAPE:255-0 mb above ground
CAPE:surface
CFRZR:surface
CICEP:surface
CIN:180-0 mb above ground
CIN:255-0 mb above ground
CIN:surface
CLWMR:1000 mb
CLWMR:100 mb
CLWMR:150 mb
CLWMR:200 mb
CLWMR:250 mb
CLWMR:300 mb
CLWMR:350 mb
CLWMR:400 mb
CLWMR:450 mb
CLWMR:500 mb
CLWMR:550 mb
CLWMR:600 mb
CLWMR:650 mb
CLWMR:700 mb
CLWMR:750 mb
CLWMR:800 mb
CLWMR:850 mb
CLWMR:900 mb
CLWMR:925 mb
CLWMR:950 mb
CLWMR:975 mb
CLWMR:1 hybrid level
CPOFP:surface
CPRAT:surface
CRAIN:surface
CSNOW:surface
CWAT:entire atmosphere (considered as a single layer)
CWORK:entire atmosphere (considered as a single layer)
DLWRF:surface
DPT:2 m above ground
DSWRF:surface
FLDCP:surface
GFLUX:surface
GUST:surface
HGT:0C isotherm
HGT:1000 mb
HGT:100 mb
HGT:150 mb
HGT:200 mb
HGT:250 mb
HGT:300 mb
HGT:30 mb
HGT:350 mb
HGT:400 mb
HGT:450 mb
HGT:500 mb
HGT:50 mb
HGT:550 mb
HGT:600 mb
HGT:650 mb
HGT:700 mb
HGT:70 mb
HGT:750 mb
HGT:800 mb
HGT:850 mb
HGT:900 mb
HGT:925 mb
HGT:950 mb
HGT:975 mb
HGT:highest tropospheric freezing level
HGT:max wind
HGT:PV=-2e-06 (Km^2/kg/s) surface
HGT:PV=2e-06 (Km^2/kg/s) surface
HGT:surface
HGT:tropopause
HINDEX:surface
HLCY:3000-0 m above ground
HPBL:surface
ICAHT:max wind
ICAHT:tropopause
ICEC:surface
ICSEV:100 mb
ICSEV:150 mb
ICSEV:200 mb
ICSEV:250 mb
ICSEV:300 mb
ICSEV:350 mb
ICSEV:400 mb
ICSEV:450 mb
ICSEV:500 mb
ICSEV:550 mb
ICSEV:600 mb
ICSEV:650 mb
ICSEV:700 mb
ICSEV:750 mb
ICSEV:800 mb
ICSEV:850 mb
ICSEV:900 mb
ICSEV:950 mb
ICSEV:1000 mb
ICMR:50 mb
ICMR:1000 mb
ICMR:100 mb
ICMR:150 mb
ICMR:200 mb
ICMR:250 mb
ICMR:300 mb
ICMR:350 mb
ICMR:400 mb
ICMR:450 mb
ICMR:500 mb
ICMR:550 mb
ICMR:600 mb
ICMR:650 mb
ICMR:700 mb
ICMR:750 mb
ICMR:800 mb
ICMR:850 mb
ICMR:900 mb
ICMR:925 mb
ICMR:950 mb
ICMR:975 mb
ICMR:1 hybrid level
LAND:surface
LFTX:surface
LHTFL:surface
MSLET:mean sea level
NCPCP:surface
PEVPR:surface
PLI:30-0 mb above ground
PLPL:255-0 mb above ground
POT:0.995 sigma level
PRATE:surface
PRES:80 m above ground
PRES:convective cloud bottom level
PRES:convective cloud top level
PRES:high cloud bottom level
PRES:high cloud top level
PRES:low cloud bottom level
PRES:low cloud top level
PRES:max wind
PRES:mean sea level
PRES:middle cloud bottom level
PRES:middle cloud top level
PRES:PV=-2e-06 (Km^2/kg/s) surface
PRES:PV=2e-06 (Km^2/kg/s) surface
PRES:surface
PRES:tropopause
PRMSL:mean sea level
PWAT:entire atmosphere
REFC:entire atmosphere
RH:0.33-1 sigma layer
RH:0.44-0.72 sigma layer
RH:0.44-1 sigma layer
RH:0.72-0.94 sigma layer
RH:0.995 sigma level
RH:0C isotherm
RH:1000 mb
RH:100 mb
RH:150 mb
RH:200 mb
RH:250 mb
RH:2 m above ground
RH:300 mb
RH:30-0 mb above ground
RH:30 mb
RH:350 mb
RH:400 mb
RH:450 mb
RH:500 mb
RH:50 mb
RH:550 mb
RH:600 mb
RH:650 mb
RH:700 mb
RH:70 mb
RH:750 mb
RH:800 mb
RH:850 mb
RH:900 mb
RH:925 mb
RH:950 mb
RH:975 mb
RH:entire atmosphere (considered as a single layer)
RH:highest tropospheric freezing level
SFCR:surface
RWMR:1000 mb
RWMR:100 mb
RWMR:150 mb
RWMR:200 mb
RWMR:250 mb
RWMR:300 mb
RWMR:350 mb
RWMR:400 mb
RWMR:450 mb
RWMR:500 mb
RWMR:550 mb
RWMR:600 mb
RWMR:650 mb
RWMR:700 mb
RWMR:750 mb
RWMR:800 mb
RWMR:850 mb
RWMR:900 mb
RWMR:925 mb
RWMR:950 mb
RWMR:975 mb
RWMR:1 hybrid level
SHTFL:surface
SNMR:1000 mb
SNMR:100 mb
SNMR:150 mb
SNMR:200 mb
SNMR:250 mb
SNMR:300 mb
SNMR:350 mb
SNMR:400 mb
SNMR:450 mb
SNMR:500 mb
SNMR:550 mb
SNMR:600 mb
SNMR:650 mb
SNMR:700 mb
SNMR:750 mb
SNMR:800 mb
SNMR:850 mb
SNMR:900 mb
SNMR:925 mb
SNMR:950 mb
SNMR:975 mb
SNMR:1 hybrid level
SOILW:0-0.1 m below ground
SOILW:0.1-0.4 m below ground
SOILW:0.4-1 m below ground
SOILW:1-2 m below ground
SPFH:2 m above ground
SPFH:30-0 mb above ground
SPFH:80 m above ground
SPFH:850 mb
SPFH:900 mb
SPFH:90-60 mb above ground
SPFH:950 mb
SUNSD:surface
TCDC:boundary layer cloud layer
TCDC:convective cloud layer
TCDC:entire atmosphere
TCDC:high cloud layer
TCDC:low cloud layer
TCDC:middle cloud layer
TMAX:2 m above ground
TMIN:2 m above ground
TSOIL:0-0.1 m below ground
TSOIL:0.1-0.4 m below ground
TSOIL:0.4-1 m below ground
TMP:0.995 sigma level
TMP:1000 mb
TMP:100 m above ground
TMP:100 mb
TMP:10 mb
TMP:15 mb
TSOIL:1-2 m below ground
TMP:150 mb
TMP:1829 m above mean sea level
TMP:200 mb
TMP:250 mb
TMP:2743 m above mean sea level
TMP:2 m above ground
TMP:300 mb
TMP:30-0 mb above ground
TMP:350 mb
TMP:3658 m above mean sea level
TMP:400 mb
TMP:450 mb
TMP:500 mb
TMP:50 mb
TMP:550 mb
TMP:600 mb
TMP:650 mb
TMP:700 mb
TMP:70 mb
TMP:750 mb
TMP:800 mb
TMP:80 m above ground
TMP:850 mb
TMP:900 mb
TMP:925 mb
TMP:950 mb
TMP:975 mb
TMP:high cloud top level
TMP:low cloud top level
TMP:max wind
TMP:middle cloud top level
TMP:PV=-2e-06 (Km^2/kg/s) surface
TMP:PV=2e-06 (Km^2/kg/s) surface
TMP:surface
TMP:tropopause
TOZNE:entire atmosphere (considered as a single layer)
UFLX:surface
UGRD:0.995 sigma level
UGRD:1000 mb
UGRD:100 m above ground
UGRD:100 mb
UGRD:10 m above ground
UGRD:150 mb
UGRD:1829 m above mean sea level
UGRD:200 mb
UGRD:20 m above ground
UGRD:250 mb
UGRD:2743 m above mean sea level
UGRD:300 mb
UGRD:30 m above ground
UGRD:30-0 mb above ground
UGRD:30 mb
UGRD:350 mb
UGRD:3658 m above mean sea level
UGRD:400 mb
UGRD:40 m above ground
UGRD:450 mb
UGRD:500 mb
UGRD:50 mb
UGRD:50 m above ground
UGRD:550 mb
UGRD:600 mb
UGRD:650 mb
UGRD:700 mb
UGRD:70 mb
UGRD:750 mb
UGRD:800 mb
UGRD:80 m above ground
UGRD:850 mb
UGRD:900 mb
UGRD:925 mb
UGRD:950 mb
UGRD:975 mb
UGRD:max wind
UGRD:planetary boundary layer
UGRD:PV=-2e-06 (Km^2/kg/s) surface
UGRD:PV=2e-06 (Km^2/kg/s) surface
UGRD:tropopause
U-GWD:surface
ULWRF:surface
ULWRF:top of atmosphere
USTM:6000-0 m above ground
USWRF:surface
USWRF:top of atmosphere
VFLX:surface
VGRD:0.995 sigma level
VGRD:1000 mb
VGRD:100 m above ground
VGRD:100 mb
VGRD:10 m above ground
VGRD:150 mb
VGRD:1829 m above mean sea level
VGRD:200 mb
VGRD:20 mb
VGRD:20 m above ground
VGRD:250 mb
VGRD:2743 m above mean sea level
VGRD:300 mb
VGRD:30 m above ground
VGRD:30-0 mb above ground
VGRD:30 mb
VGRD:350 mb
VGRD:3658 m above mean sea level
VGRD:400 mb
VGRD:40 m above ground
VGRD:450 mb
VGRD:500 mb
VGRD:50 mb
VGRD:50 m above ground
VGRD:550 mb
VGRD:600 mb
VGRD:650 mb
VGRD:700 mb
VGRD:70 mb
VGRD:750 mb
VGRD:800 mb
VGRD:80 m above ground
VGRD:850 mb
VGRD:900 mb
VGRD:925 mb
VGRD:950 mb
VGRD:975 mb
VGRD:max wind
VGRD:planetary boundary layer
VGRD:PV=-2e-06 (Km^2/kg/s) surface
VGRD:PV=2e-06 (Km^2/kg/s) surface
VGRD:tropopause
V-GWD:surface
VRATE:planetary boundary layer
VSTM:6000-0 m above ground
DZDT:1000 mb
DZDT:100 mb
DZDT:150 mb
DZDT:200 mb
DZDT:250 mb
DZDT:300 mb
DZDT:350 mb
DZDT:400 mb
DZDT:450 mb
DZDT:500 mb
DZDT:550 mb
DZDT:600 mb
DZDT:650 mb
DZDT:700 mb
DZDT:750 mb
DZDT:800 mb
DZDT:850 mb
DZDT:900 mb
DZDT:925 mb
DZDT:950 mb
DZDT:975 mb
VVEL:0.995 sigma level
VVEL:1000 mb
VVEL:100 mb
VVEL:150 mb
VVEL:200 mb
VVEL:250 mb
VVEL:300 mb
VVEL:350 mb
VVEL:400 mb
VVEL:450 mb
VVEL:500 mb
VVEL:550 mb
VVEL:600 mb
VVEL:650 mb
VVEL:700 mb
VVEL:750 mb
VVEL:800 mb
VVEL:850 mb
VVEL:900 mb
VVEL:925 mb
VVEL:950 mb
VVEL:975 mb
VWSH:PV=-2e-06 (Km^2/kg/s) surface
VWSH:PV=2e-06 (Km^2/kg/s) surface
VWSH:tropopause
WATR:surface
WEASD:surface
WILT:surface
SNOD:surface
VIS:surface

Modify exglobal_fcst_nemsfv3gfs.sh

210c210
<   for file in $memdir/RESTART/${PDY}.${cyc}0000.*.nc; do
---
>   for file in $gmemdir/RESTART/${PDY}.${cyc}0000.*.nc; do
214,215c214,215
<     if [ $fsufanl = "sfcanl_data" ]; then
<       file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g")
---
>     if [ $fsufanl = "sfc_data" ]; then
>       #file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g")


mkdir -p /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700/gfs.20180923/00
cd /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700/gfs.20180923/00
cp -r /gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018091100/NODA-2018091100-2018092300/gfs.20180911/00/RERUN_RESTART ./RESTART

Modify the xml to run "cycled" forecasts

https://github.com/christopherwharrop/rocoto/wiki/documentation

cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700
vi NODA-2018092300-2018100700.xml
!for reference: /gpfs/hps3/emc/meso/save/Donald.E.Lippi/model_spinup/run_fcst/from_fanglin/fv3test.xml_fanglin


17a18,20
>       <!ENTITY SDATE    "201809230000">
>       <!--<!ENTITY EDATE    "201810070000">-->
>       <!ENTITY EDATE    "201809230600">
22,24c25,27
<       <!ENTITY SDATE    "201809230000">
<       <!ENTITY EDATE    "201810070000">
<       <!ENTITY INTERVAL "24:00:00">
---
>       <!ENTITY SDATE_GFS    "201809230000">
>       <!ENTITY EDATE_GFS    "201810070000">
>       <!ENTITY INTERVAL_GFS "06:00:00">
49,50c52,53
<       <!ENTITY TASKTHROTTLE  "20">
<       <!ENTITY MAXTRIES      "2">
---
>       <!ENTITY TASKTHROTTLE  "40">
>       <!ENTITY MAXTRIES      "1">
53,64c56,60
<
<       <!ENTITY QUEUE_GETIC_GFS     "&QUEUE_ARCH;">
<       <!ENTITY WALLTIME_GETIC_GFS  "06:00:00">
<       <!ENTITY RESOURCES_GETIC_GFS "<nodes>1:ppn=1</nodes><shared></shared>">
<       <!ENTITY MEMORY_GETIC_GFS    "2048M">
<       <!ENTITY NATIVE_GETIC_GFS    "">
<
<       <!ENTITY QUEUE_FV3IC_GFS     "&QUEUE;">
<       <!ENTITY WALLTIME_FV3IC_GFS  "00:30:00">
<       <!ENTITY RESOURCES_FV3IC_GFS "<nodes>1:ppn=1</nodes>">
<       <!ENTITY MEMORY_FV3IC_GFS    "">
<       <!ENTITY NATIVE_FV3IC_GFS    "">
---
>         <!ENTITY QUEUE_FCST_GDAS     "&QUEUE;">
>         <!ENTITY WALLTIME_FCST_GDAS  "01:00:00">
>         <!ENTITY RESOURCES_FCST_GDAS "<nodes>108:ppn=12</nodes>">
>         <!ENTITY MEMORY_FCST_GDAS    "1024M">
>         <!ENTITY NATIVE_FCST_GDAS    "">
73c69
<       <!ENTITY WALLTIME_POST_GFS  "06:00:00">
---
>       <!ENTITY WALLTIME_POST_GFS  "01:15:00">
78,83d73
<       <!ENTITY QUEUE_VRFY_GFS     "&QUEUE;">
<       <!ENTITY WALLTIME_VRFY_GFS  "06:00:00">
<       <!ENTITY RESOURCES_VRFY_GFS "<nodes>1:ppn=1</nodes>">
<       <!ENTITY MEMORY_VRFY_GFS    "3072M">
<       <!ENTITY NATIVE_VRFY_GFS    "">
<
86c76,77
<       <!ENTITY RESOURCES_ARCH_GFS "<nodes>1:ppn=1</nodes><shared></shared>">
---
>       <!ENTITY WALLTIME_CLEAN_GFS  "00:20:00">
>         <!ENTITY RESOURCES_ARCH_GFS "<nodes>1:ppn=1</nodes><shared></shared>">
99c90,91
<       <cycledef group="gfs">&SDATE; &EDATE; &INTERVAL;</cycledef>
---
>         <cycledef group="gdas" >&SDATE;     &EDATE;     06:00:00</cycledef>
>       <cycledef group="gfs"  >&SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS;</cycledef>
101c93
< <task name="gfsgetic" cycledefs="gfs" maxtries="&MAXTRIES;">
---
> <task name="gdasfcst" cycledefs="gdas" maxtries="&MAXTRIES;">
103c95
<       <command>&JOBS_DIR;/getic.sh</command>
---
>         <command>&JOBS_DIR;/fcst.sh</command>
105,121c97,113
<       <jobname><cyclestr>&PSLOT;_gfsgetic_@H</cyclestr></jobname>
<       <account>&ACCOUNT;</account>
<       <queue>&QUEUE_GETIC_GFS;</queue>
<       &RESOURCES_GETIC_GFS;
<       <walltime>&WALLTIME_GETIC_GFS;</walltime>
<       <memory>&MEMORY_GETIC_GFS;</memory>
<       <native>&NATIVE_GETIC_GFS;</native>
<
<       <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsgetic.log</cyclestr></join>
<
<       <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
<       <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
<       <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
<       <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
<       <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
<       <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
<       <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
---
>         <jobname><cyclestr>&PSLOT;_gdasfcst_@H</cyclestr></jobname>
>         <account>&ACCOUNT;</account>
>         <queue>&QUEUE_FCST_GDAS;</queue>
>         &RESOURCES_FCST_GDAS;
>         <walltime>&WALLTIME_FCST_GDAS;</walltime>
>         <memory>&MEMORY_FCST_GDAS;</memory>
>         <native>&NATIVE_FCST_GDAS;</native>
>
>         <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gdasfcst.log</cyclestr></join>
>
>         <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
>         <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
>         <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
>         <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
>         <envar><name>CDUMP</name><value>gdas</value></envar>
>         <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
>         <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
124,131c116,117
<               <not>
<                       <and>
<                               <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/pgbanl.&CDUMP;.@Y@m@d@H</cyclestr></datadep>
<                               <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/siganl.&CDUMP;.@Y@m@d@H</cyclestr></datadep>
<                               <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/sfcanl.&CDUMP;.@Y@m@d@H</cyclestr></datadep>
<                       </and>
<               </not>
<       </dependency>
---
>             <datadep><cyclestr offset="-6:00:00">&ROTDIR;/gdas.@Y@m@d/@H/RESTART</cyclestr></datadep>
>         </dependency>
135,172d120
< <task name="gfsfv3ic" cycledefs="gfs" maxtries="&MAXTRIES;">
<
<       <command>&JOBS_DIR;/fv3ic.sh</command>
<
<       <jobname><cyclestr>&PSLOT;_gfsfv3ic_@H</cyclestr></jobname>
<       <account>&ACCOUNT;</account>
<       <queue>&QUEUE_FV3IC_GFS;</queue>
<       &RESOURCES_FV3IC_GFS;
<       <walltime>&WALLTIME_FV3IC_GFS;</walltime>
<       <memory>&MEMORY_FV3IC_GFS;</memory>
<       <native>&NATIVE_FV3IC_GFS;</native>
<
<       <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsfv3ic.log</cyclestr></join>
<
<       <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
<       <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
<       <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
<       <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
<       <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
<       <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
<       <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
<
<       <dependency>
<               <and>
<                       <and>
<                               <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/siganl.&CDUMP;.@Y@m@d@H</cyclestr></datadep>
<                               <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/sfcanl.&CDUMP;.@Y@m@d@H</cyclestr></datadep>
<                       </and>
<                       <not>
<                               <and>
<                                       <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/gfs_data.tile6.nc</cyclestr></datadep>
<                                       <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/sfc_data.tile6.nc</cyclestr></datadep>
<                               </and>
<                       </not>
<               </and>
<       </dependency>
<
< </task>
197,201c145,146
<               <and>
<                       <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/gfs_data.tile6.nc</cyclestr></datadep>
<                       <datadep><cyclestr>&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/sfc_data.tile6.nc</cyclestr></datadep>
<               </and>
<       </dependency>
---
>             <datadep><cyclestr offset="-6:00:00">&ROTDIR;/gdas.@Y@m@d/@H/RESTART</cyclestr></datadep>
>         </dependency>
207,209c152,157
<       <var name="grp">001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020</var>
<       <var name="dep">f012 f024 f036 f048 f060 f072 f084 f096 f108 f120 f132 f144 f156 f168 f180 f192 f204 f216 f228 f240</var>
<       <var name="lst">f000_f003_f006_f009_f012 f015_f018_f021_f024 f027_f030_f033_f036 f039_f042_f045_f048 f051_f054_f057_f060 f063_f066_f069_f072 f075_f078_f081_f084 f087_f090_f093_f096 f099_f102_f105_f108 f111_f114_f117_f120 f123_f126_f129_f132 f135_f138_f141_f144 f147_f150_f153_f156 f159_f162_f165_f168 f171_f174_f177_f180 f183_f186_f189_f192 f195_f198_f201_f204 f207_f210_f213_f216 f219_f222_f225_f228 f231_f234_f237_f240</var>
---
>         <var name="grp">001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020 021 022 023 024 025 026 027 028 029 030 031 032 033 034 035 036 037 038 039 040 041 042 043 044 045 046 047 048 049 050 051 052 053 054 055 056 057 058 059 060 061 062 063 064 065 066 067 068 069 070 071 072 073 074 075 076 077 078 079 080 081 082 083 084 085</var>
>
>         <var name="dep">f001 f003 f005 f007 f009 f011 f013 f015 f017 f019 f021 f023 f025 f027 f029 f031 f033 f035 f037 f039 f041 f043 f045 f047 f049 f051 f053 f055 f057 f059 f061 f063 f065 f067 f069 f071 f073 f075 f077 f079 f081 f083 f085 f087 f089 f091 f093 f095 f097 f099 f101 f103 f105 f107 f109 f111 f113 f115 f117 f119 f121 f123 f125 f127 f129 f131 f133 f135 f137 f139 f141 f143 f145 f147 f149 f151 f153 f155 f157 f159 f161 f163 f165 f167 f168</var>
>
>         <var name="lst">f000_f001 f002_f003 f004_f005 f006_f007 f008_f009 f010_f011 f012_f013 f014_f015 f016_f017 f018_f019 f020_f021 f022_f023 f024_f025 f026_f027 f028_f029 f030_f031 f032_f033 f034_f035 f036_f037 f038_f039 f040_f041 f042_f043 f044_f045 f046_f047 f048_f049 f050_f051 f052_f053 f054_f055 f056_f057 f058_f059 f060_f061 f062_f063 f064_f065 f066_f067 f068_f069 f070_f071 f072_f073 f074_f075 f076_f077 f078_f079 f080_f081 f082_f083 f084_f085 f086_f087 f088_f089 f090_f091 f092_f093 f094_f095 f096_f097 f098_f099 f100_f101 f102_f103 f104_f105 f106_f107 f108_f109 f110_f111 f112_f113 f114_f115 f116_f117 f118_f119 f120_f121 f122_f123 f124_f125 f126_f127 f128_f129 f130_f131 f132_f133 f134_f135 f136_f137 f138_f139 f140_f141 f142_f143 f144_f145 f146_f147 f148_f149 f150_f151 f152_f153 f154_f155 f156_f157 f158_f159 f160_f161 f162_f163 f164_f165 f166_f167 f168</var>
>
244c192
< <task name="gfsvrfy" cycledefs="gfs" maxtries="&MAXTRIES;">
---
> <task name="gfscopy" cycledefs="gfs" maxtries="&MAXTRIES;">
246c194
<       <command>&JOBS_DIR;/vrfy.sh</command>
---
>         <command>/gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE/arch_NODA/run_copy.ksh</command>
248,268c196,218
<       <jobname><cyclestr>&PSLOT;_gfsvrfy_@H</cyclestr></jobname>
<       <account>&ACCOUNT;</account>
<       <queue>&QUEUE_VRFY_GFS;</queue>
<       &RESOURCES_VRFY_GFS;
<       <walltime>&WALLTIME_VRFY_GFS;</walltime>
<       <memory>&MEMORY_VRFY_GFS;</memory>
<       <native>&NATIVE_VRFY_GFS;</native>
<
<       <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log</cyclestr></join>
<
<       <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
<       <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
<       <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
<       <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
<       <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
<       <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
<       <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
<
<       <dependency>
<               <metataskdep metatask="gfspost"/>
<       </dependency>
---
>         <jobname><cyclestr>&PSLOT;_gfscopy_@H</cyclestr></jobname>
>         <account>&ACCOUNT;</account>
>         <queue>&QUEUE_ARCH_GFS;</queue>
>         &RESOURCES_ARCH_GFS;
>         <walltime>&WALLTIME_ARCH_GFS;</walltime>
>         <memory>&MEMORY_ARCH_GFS;</memory>
>         <native>&NATIVE_ARCH_GFS;</native>
>
>         <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfscopy.log</cyclestr></join>
>
>         <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
>         <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
>         <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
>         <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
>         <envar><name>CDUMP</name><value>gfs</value></envar>
>         <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
>         <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
>
>         <!--
>         -->
>         <dependency>
>                         <metataskdep metatask="gfspost"/>
>         </dependency>
272,282c222
< <task name="gfsarch" cycledefs="gfs" maxtries="&MAXTRIES;" final="true">
<
<       <command>&JOBS_DIR;/arch.sh</command>
<
<       <jobname><cyclestr>&PSLOT;_gfsarch_@H</cyclestr></jobname>
<       <account>&ACCOUNT;</account>
<       <queue>&QUEUE_ARCH_GFS;</queue>
<       &RESOURCES_ARCH_GFS;
<       <walltime>&WALLTIME_ARCH_GFS;</walltime>
<       <memory>&MEMORY_ARCH_GFS;</memory>
<       <native>&NATIVE_ARCH_GFS;</native>
---
> <task name="gfsarch" cycledefs="gfs" maxtries="&MAXTRIES;">
284,292c224
<       <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsarch.log</cyclestr></join>
<
<       <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
<       <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
<       <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
<       <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
<       <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
<       <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
<       <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
---
>         <command>/gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE/arch_NODA/run_arch.ksh</command>
294,299c226,246
<       <dependency>
<               <and>
<                       <taskdep task="gfsvrfy"/>
<                       <streq><left>&ARCHIVE_TO_HPSS;</left><right>YES</right></streq>
<               </and>
<       </dependency>
---
>         <jobname><cyclestr>&PSLOT;_gfsarch_@H</cyclestr></jobname>
>         <account>&ACCOUNT;</account>
>         <queue>&QUEUE_ARCH_GFS;</queue>
>         &RESOURCES_ARCH_GFS;
>         <walltime>&WALLTIME_ARCH_GFS;</walltime>
>         <memory>&MEMORY_ARCH_GFS;</memory>
>         <native>&NATIVE_ARCH_GFS;</native>
>
>         <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsarch.log</cyclestr></join>
>
>         <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
>         <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
>         <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
>         <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
>         <envar><name>CDUMP</name><value>gfs</value></envar>
>         <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
>         <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
>
>         <dependency>
>                         <taskdep task="gfscopy"/>
>         </dependency>
304a252
>
<?xml version="1.0"?>
<!DOCTYPE workflow
[
        <!--
        PROGRAM
                Main workflow manager for Forecast only Global Forecast System

        AUTHOR:
                Rahul Mahajan
                rahul.mahajan@noaa.gov

        NOTES:
                This workflow was automatically generated at 2019-05-03 18:06:01.411290
        -->

        <!-- Experiment parameters such as name, cycle, resolution -->
        <!ENTITY PSLOT    "NODA-2018092300-2018100700">
        <!ENTITY SDATE    "201809230000">
        <!--<!ENTITY EDATE    "201810070000">-->
        <!ENTITY EDATE    "201809230600">
        <!ENTITY CDUMP    "gfs">
        <!ENTITY CASE     "C768">

        <!-- Experiment parameters such as starting, ending dates -->
        <!ENTITY SDATE_GFS    "201809230000">
        <!ENTITY EDATE_GFS    "201810070000">
        <!ENTITY INTERVAL_GFS "06:00:00">

        <!-- Run Environment -->
        <!ENTITY RUN_ENVIR "emc">

        <!-- Experiment related directories -->
        <!ENTITY EXPDIR "/gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700">
        <!ENTITY ROTDIR "/gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700">
        <!ENTITY ICSDIR "/gpfs/hps2/ptmp/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/FV3ICS">

        <!-- Directories for driving the workflow -->
        <!ENTITY HOMEgfs  "/gpfs/hps3/emc/meso/save/Donald.E.Lippi/global-workflow-20190306">
        <!ENTITY JOBS_DIR "/gpfs/hps3/emc/meso/save/Donald.E.Lippi/global-workflow-20190306/jobs/rocoto">

        <!-- Machine related entities -->
        <!ENTITY ACCOUNT    "FV3GFS-T2O">
        <!ENTITY QUEUE      "dev">
        <!ENTITY QUEUE_ARCH "dev_transfer">
        <!ENTITY SCHEDULER  "lsfcray">

        <!-- Toggle HPSS archiving -->
        <!ENTITY ARCHIVE_TO_HPSS "YES">

        <!-- ROCOTO parameters that control workflow -->
        <!ENTITY CYCLETHROTTLE "1">
        <!ENTITY TASKTHROTTLE  "40">
        <!ENTITY MAXTRIES      "1">

        <!-- BEGIN: Resource requirements for the workflow -->
        <!ENTITY QUEUE_FCST_GDAS     "&QUEUE;">
        <!ENTITY WALLTIME_FCST_GDAS  "01:00:00">
        <!ENTITY RESOURCES_FCST_GDAS "<nodes>108:ppn=12</nodes>">
        <!ENTITY MEMORY_FCST_GDAS    "1024M">
        <!ENTITY NATIVE_FCST_GDAS    "">

        <!ENTITY QUEUE_FCST_GFS     "&QUEUE;">
        <!ENTITY WALLTIME_FCST_GFS  "06:00:00">
        <!ENTITY RESOURCES_FCST_GFS "<nodes>108:ppn=12</nodes>">
        <!ENTITY MEMORY_FCST_GFS    "1024M">
        <!ENTITY NATIVE_FCST_GFS    "">

        <!ENTITY QUEUE_POST_GFS     "&QUEUE;">
        <!ENTITY WALLTIME_POST_GFS  "01:15:00">
        <!ENTITY RESOURCES_POST_GFS "<nodes>7:ppn=12</nodes>">
        <!ENTITY MEMORY_POST_GFS    "3072M">
        <!ENTITY NATIVE_POST_GFS    "">

        <!ENTITY QUEUE_ARCH_GFS     "&QUEUE_ARCH;">
        <!ENTITY WALLTIME_ARCH_GFS  "06:00:00">
        <!ENTITY WALLTIME_CLEAN_GFS  "00:20:00">
        <!ENTITY RESOURCES_ARCH_GFS "<nodes>1:ppn=1</nodes><shared></shared>">
        <!ENTITY MEMORY_ARCH_GFS    "2048M">
        <!ENTITY NATIVE_ARCH_GFS    "">

        <!-- END: Resource requirements for the workflow -->

]>

<workflow realtime="F" scheduler="&SCHEDULER;" cyclethrottle="&CYCLETHROTTLE;" taskthrottle="&TASKTHROTTLE;">

        <log verbosity="10"><cyclestr>&EXPDIR;/logs/@Y@m@d@H.log</cyclestr></log>

        <!-- Define the cycles -->
        <cycledef group="gdas" >&SDATE;     &EDATE;     06:00:00</cycledef>
        <cycledef group="gfs"  >&SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS;</cycledef>

<task name="gdasfcst" cycledefs="gdas" maxtries="&MAXTRIES;">

        <command>&JOBS_DIR;/fcst.sh</command>

        <jobname><cyclestr>&PSLOT;_gdasfcst_@H</cyclestr></jobname>
        <account>&ACCOUNT;</account>
        <queue>&QUEUE_FCST_GDAS;</queue>
        &RESOURCES_FCST_GDAS;
        <walltime>&WALLTIME_FCST_GDAS;</walltime>
        <memory>&MEMORY_FCST_GDAS;</memory>
        <native>&NATIVE_FCST_GDAS;</native>

        <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gdasfcst.log</cyclestr></join>

        <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
        <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
        <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
        <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
        <envar><name>CDUMP</name><value>gdas</value></envar>
        <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
        <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>

        <dependency>
            <datadep><cyclestr offset="-6:00:00">&ROTDIR;/gdas.@Y@m@d/@H/RESTART</cyclestr></datadep>
        </dependency>

</task>


<task name="gfsfcst" cycledefs="gfs" maxtries="&MAXTRIES;">

        <command>&JOBS_DIR;/fcst.sh</command>

        <jobname><cyclestr>&PSLOT;_gfsfcst_@H</cyclestr></jobname>
        <account>&ACCOUNT;</account>
        <queue>&QUEUE_FCST_GFS;</queue>
        &RESOURCES_FCST_GFS;
        <walltime>&WALLTIME_FCST_GFS;</walltime>
        <memory>&MEMORY_FCST_GFS;</memory>
        <native>&NATIVE_FCST_GFS;</native>

        <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsfcst.log</cyclestr></join>

        <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
        <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
        <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
        <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
        <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
        <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
        <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>

        <dependency>
            <datadep><cyclestr offset="-6:00:00">&ROTDIR;/gdas.@Y@m@d/@H/RESTART</cyclestr></datadep>
        </dependency>

</task>

<metatask name="gfspost">

        <var name="grp">001 002 003 004 005 006 007 008 009 010 011 012 013 014 015 016 017 018 019 020 021 022 023 024 025 026 027 028 029 030 031 032 033 034 035 036 037 038 039 040 041 042 043 044 045 046 047 048 049 050 051 052 053 054 055 056 057 058 059 060 061 062 063 064 065 066 067 068 069 070 071 072 073 074 075 076 077 078 079 080 081 082 083 084 085</var>

        <var name="dep">f001 f003 f005 f007 f009 f011 f013 f015 f017 f019 f021 f023 f025 f027 f029 f031 f033 f035 f037 f039 f041 f043 f045 f047 f049 f051 f053 f055 f057 f059 f061 f063 f065 f067 f069 f071 f073 f075 f077 f079 f081 f083 f085 f087 f089 f091 f093 f095 f097 f099 f101 f103 f105 f107 f109 f111 f113 f115 f117 f119 f121 f123 f125 f127 f129 f131 f133 f135 f137 f139 f141 f143 f145 f147 f149 f151 f153 f155 f157 f159 f161 f163 f165 f167 f168</var>

        <var name="lst">f000_f001 f002_f003 f004_f005 f006_f007 f008_f009 f010_f011 f012_f013 f014_f015 f016_f017 f018_f019 f020_f021 f022_f023 f024_f025 f026_f027 f028_f029 f030_f031 f032_f033 f034_f035 f036_f037 f038_f039 f040_f041 f042_f043 f044_f045 f046_f047 f048_f049 f050_f051 f052_f053 f054_f055 f056_f057 f058_f059 f060_f061 f062_f063 f064_f065 f066_f067 f068_f069 f070_f071 f072_f073 f074_f075 f076_f077 f078_f079 f080_f081 f082_f083 f084_f085 f086_f087 f088_f089 f090_f091 f092_f093 f094_f095 f096_f097 f098_f099 f100_f101 f102_f103 f104_f105 f106_f107 f108_f109 f110_f111 f112_f113 f114_f115 f116_f117 f118_f119 f120_f121 f122_f123 f124_f125 f126_f127 f128_f129 f130_f131 f132_f133 f134_f135 f136_f137 f138_f139 f140_f141 f142_f143 f144_f145 f146_f147 f148_f149 f150_f151 f152_f153 f154_f155 f156_f157 f158_f159 f160_f161 f162_f163 f164_f165 f166_f167 f168</var>


        <task name="gfspost#grp#" cycledefs="gfs" maxtries="&MAXTRIES;">

                <command>&JOBS_DIR;/post.sh</command>

                <jobname><cyclestr>&PSLOT;_gfspost#grp#_@H</cyclestr></jobname>
                <account>&ACCOUNT;</account>
                <queue>&QUEUE_POST_GFS;</queue>
                &RESOURCES_POST_GFS;
                <walltime>&WALLTIME_POST_GFS;</walltime>
                <memory>&MEMORY_POST_GFS;</memory>
                <native>&NATIVE_POST_GFS;</native>

                <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log</cyclestr></join>

                <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
                <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
                <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
                <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
                <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
                <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
                <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>
                <envar><name>FHRGRP</name><value>#grp#</value></envar>
                <envar><name>FHRLST</name><value>#lst#</value></envar>
                <envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>

                <dependency>
                        <datadep><cyclestr>&ROTDIR;/gfs.@Y@m@d/@H/[email protected]#dep#.nemsio</cyclestr></datadep>
                </dependency>

        </task>

</metatask>

<task name="gfscopy" cycledefs="gfs" maxtries="&MAXTRIES;">

        <command>/gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE/arch_NODA/run_copy.ksh</command>

        <jobname><cyclestr>&PSLOT;_gfscopy_@H</cyclestr></jobname>
        <account>&ACCOUNT;</account>
        <queue>&QUEUE_ARCH_GFS;</queue>
        &RESOURCES_ARCH_GFS;
        <walltime>&WALLTIME_ARCH_GFS;</walltime>
        <memory>&MEMORY_ARCH_GFS;</memory>
        <native>&NATIVE_ARCH_GFS;</native>

        <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfscopy.log</cyclestr></join>

        <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
        <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
        <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
        <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
        <envar><name>CDUMP</name><value>gfs</value></envar>
        <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
        <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>

        <!--
        -->
        <dependency>
                        <metataskdep metatask="gfspost"/>
        </dependency>

</task>

<task name="gfsarch" cycledefs="gfs" maxtries="&MAXTRIES;">

        <command>/gpfs/hps3/emc/meso/save/Donald.E.Lippi/PhD-globalRadarOSSE/arch_NODA/run_arch.ksh</command>

        <jobname><cyclestr>&PSLOT;_gfsarch_@H</cyclestr></jobname>
        <account>&ACCOUNT;</account>
        <queue>&QUEUE_ARCH_GFS;</queue>
        &RESOURCES_ARCH_GFS;
        <walltime>&WALLTIME_ARCH_GFS;</walltime>
        <memory>&MEMORY_ARCH_GFS;</memory>
        <native>&NATIVE_ARCH_GFS;</native>

        <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/gfsarch.log</cyclestr></join>

        <envar><name>RUN_ENVIR</name><value>&RUN_ENVIR;</value></envar>
        <envar><name>HOMEgfs</name><value>&HOMEgfs;</value></envar>
        <envar><name>EXPDIR</name><value>&EXPDIR;</value></envar>
        <envar><name>CDATE</name><value><cyclestr>@Y@m@d@H</cyclestr></value></envar>
        <envar><name>CDUMP</name><value>gfs</value></envar>
        <envar><name>PDY</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
        <envar><name>cyc</name><value><cyclestr>@H</cyclestr></value></envar>

        <dependency>
                        <taskdep task="gfscopy"/>
        </dependency>

</task>


</workflow>

Submit jobs

The archive step was built into the workflow!!

cd /gpfs/hps3/emc/meso/noscrub/Donald.E.Lippi/fv3gfs_dl2rw/2018092300/NODA-2018092300-2018100700
crontab NODA-2018092300-2018100700.crontab
  • gdasfcst [62GB @ 6hrly @ 6hrs @ C768]
  • gfsfcst [3.2TB @ 1hrly @168hrs @ C768]
  • gfspost [2 jobs per task - 85 tasks; 441GB @ master + 0p25]
  • gfscopy
  • gfsarch
  • gfsmet

https://thisdavej.com/copy-table-in-excel-and-paste-as-a-markdown-table/

| runtimes   | median  | mean    | std dev |
|------------|---------|---------|---------|
| gdasfcst   | 0:10:34 | 0:11:26 | 0:01:40 |
| gfsfcst    | 3:03:15 | 3:09:15 | 0:30:29 |
| gfspost    | 0:22:56 | 0:23:55 | 0:06:47 |
| gfscopy    | 0:19:15 | 0:21:14 | 0:06:55 |
| gfsarch    | 3:53:42 | 4:10:38 | 1:22:49 |
| sub totals | 7:11:10 | 8:16:12 | 1:50:50 |
all times are in hh:mm:ss
⚠️ **GitHub.com Fallback** ⚠️