<?xml version="1.0"?>
<!DOCTYPE workflow
[
  <!-- Experiment parameters such as name, cycle, resolution -->
  <!ENTITY PSLOT "rt_v17p8_ugwpv1_c3_mynn">
  <!ENTITY CDUMP "gfs">
  <!ENTITY CASE "C768">
  <!ENTITY COMPONENT "atmos">
  <!ENTITY RES "0p25">
  <!ENTITY FCST_LENGTH "120">

  <!-- Experiment parameters such as starting, ending dates -->
  <!ENTITY SDATE "202403210000">
  <!ENTITY EDATE "202403300000">
  <!ENTITY INTERVAL "24:00:00">

  <!-- Run Environment -->
  <!ENTITY RUN_ENVIR "emc">

  <!-- Directories for driving the workflow -->
  <!ENTITY HOMEgfs "/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1">
  <!ENTITY JOBS_DIR "&HOMEgfs;/jobs/rocoto">

  <!-- Experiment related directories -->
  <!ENTITY EXPDIR "&HOMEgfs;/FV3GFSwfm/&PSLOT;">
  <!ENTITY ROTDIR "&HOMEgfs;/FV3GFSrun/&PSLOT;">
  <!ENTITY PYGRAFDIR "/scratch1/BMC/gsd-fv3/rtruns/pygraf">

  <!-- Machine related entities -->
  <!ENTITY ACCOUNT "gsd-fv3">
  <!ENTITY QUEUE "batch">
  <!ENTITY PARTITION_BATCH "hera">
  <!ENTITY SCHEDULER "slurm">

  <!-- ROCOTO parameters that control workflow -->
  <!ENTITY CYCLETHROTTLE "6">
  <!ENTITY TASKTHROTTLE "45">
  <!ENTITY MAXTRIES "2">

  <!-- BEGIN: Resource requirements for the workflow -->

  <!ENTITY QUEUE_PYTHON "&QUEUE;">
  <!ENTITY PARTITION_PYTHON "&PARTITION_BATCH;">
  <!ENTITY WALLTIME_PYTHON "01:30:00">
  <!ENTITY RESOURCES_PYTHON "1:ppn=12:tpp=1">
  <!ENTITY MEMORY_PYTHON "40G">
  <!ENTITY NATIVE_PYTHON "--export=NONE">

  <!-- END: Resource requirements for the workflow -->

]>

<workflow realtime="T" scheduler="&SCHEDULER;" cyclethrottle="&CYCLETHROTTLE;" taskthrottle="&TASKTHROTTLE;">

  <log verbosity="10"><cyclestr>&EXPDIR;/logs/@Y@m@d@H.log</cyclestr></log>

  <!-- Define the cycles -->
  <cycledef group="gfs">&SDATE; &EDATE; &INTERVAL;</cycledef>

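  <!-- Remap the 0p25 GRIB2 products onto the grids listed in GRID_NAMES, one remapgrib task per forecast hour -->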
  <metatask name="remapgrib" throttle="58">

    <var name="fcst"> 0 6 12 18 24 30 36 42 48 54 60 66 72 78 84 90 96 102 108 114 120 </var>
    <var name="T"> 000 006 012 018 024 030 036 042 048 054 060 066 072 078 084 090 096 102 108 114 120 </var>
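    <!-- fcst and T list the same 6-hourly forecast hours (unpadded and zero-padded); metatask vars expand in lockstep -->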

    <task name="remapgrib_#T#" cycledefs="gfs" maxtries="4">
      <command>&JOBS_DIR;/remapgrib.ksh</command>
      <account>&ACCOUNT;</account>
      <cores>1</cores>
      <walltime>00:15:00</walltime>
      <jobname><cyclestr>remapgrib_#T#_&PSLOT;</cyclestr></jobname>
      <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/remapgrib_#T#.log</cyclestr></join>
      <envar><name>ROTDIR</name><value>&ROTDIR;</value></envar>
      <envar><name>CDUMP</name><value>&CDUMP;</value></envar>
      <envar><name>COMPONENT</name><value>&COMPONENT;</value></envar>
      <envar><name>yyyymmdd</name><value><cyclestr>@Y@m@d</cyclestr></value></envar>
      <envar><name>hh</name><value><cyclestr>@H</cyclestr></value></envar>
      <envar><name>fcst</name><value>#T#</value></envar>
      <envar><name>GRID_NAMES</name><value>201D130D242</value></envar>
      <dependency>
        <datadep minsize="1b" age="120"><cyclestr>&ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/&CDUMP;.t@Hz.pgrb2.&RES;.f#T#</cyclestr></datadep>
      </dependency>
    </task>

  </metatask>

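  <!-- Run pygraf create_graphics.py to generate maps for each GRID_ID once the remapgrib metatask completes -->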
  <metatask>

    <var name="GRID_ID">full 242 130 201</var>
    <var name="TILESET">full,Africa,Beijing,Cambodia,EPacific,Europe,Taiwan,WAtlantic,WPacific AK,AKZoom,AKZoom2 CONUS,NC,NE,NW,SC,SE,SW NHemi</var>
    <var name="IMGFILE">global.yml globalAK.yml globalCONUS.yml globalNHemi.yml</var>
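    <!-- GRID_ID, TILESET, and IMGFILE expand together: the Nth grid uses the Nth tile set and the Nth image list -->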

    <task name="gfspygraf_#GRID_ID#" cycledefs="gfs" maxtries="&MAXTRIES;">

      <command>
        <cyclestr>
          source &PYGRAFDIR;/pre.sh;
          cd &PYGRAFDIR;;
          python &PYGRAFDIR;/create_graphics.py \
            maps \
            -d &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/grib2/0p25/post/#GRID_ID# \
            -f 0 &FCST_LENGTH; 6 \
            --file_type prs \
            --file_tmpl "&CDUMP;.t@Hz.pgrb2.0p25.f{FCST_TIME:03d}" \
            --images &PYGRAFDIR;/image_lists/#IMGFILE# hourly \
            -m "GFSv17p8_UGWPV1_C3_MYNN" \
            -n ${SLURM_CPUS_ON_NODE:-12} \
            -o &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/pyprd \
            -s @Y@m@d@H \
            --tiles "#TILESET#" \
            -z &ROTDIR;/&CDUMP;.@Y@m@d/@H/products/&COMPONENT;/img
        </cyclestr>
      </command>
      <account>&ACCOUNT;</account>
      <queue>&QUEUE;</queue>
      <nodes>&RESOURCES_PYTHON;</nodes>
      <walltime>&WALLTIME_PYTHON;</walltime>
      <native>--exclusive</native>
      <jobname><cyclestr>FV3GFS_python_maps_#GRID_ID#_@H_ugwpv1_c3_mynn</cyclestr></jobname>
      <join><cyclestr>&ROTDIR;/logs/@Y@m@d@H/python_@Y@m@d@H00_maps_#GRID_ID#_0-6-&FCST_LENGTH;.log</cyclestr></join>

      <dependency>
<!--
        <or>
          <timedep><cyclestr offset="030:00:00">@Y@m@d@H@M00</cyclestr></timedep>
          <metataskdep metatask="remapgrib"/>
        </or>
-->
        <metataskdep metatask="remapgrib"/>
      </dependency>

    </task>

  </metatask>

</workflow>